diff --git a/.codecov.yml b/.codecov.yml
deleted file mode 100644
index 762c4cc63c..0000000000
--- a/.codecov.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-comment:
-  layout: "header, diff"
-  behavior: default
-  require_changes: no
\ No newline at end of file
diff --git a/.github/actions/run-integration-tests/action.yml b/.github/actions/run-integration-tests/action.yml
index 61784bb6ba..a9cf4e0a72 100644
--- a/.github/actions/run-integration-tests/action.yml
+++ b/.github/actions/run-integration-tests/action.yml
@@ -5,7 +5,7 @@ inputs:
   TEST_SCOPE:
     description: "Set of flags that defines the test scope"
     required: false
-    default: "-t @GHA"
+    default: "-t @PR"
   IN_LEDGER_URL:
     description: "URL to the von network ledger browser"
     required: false
@@ -27,7 +27,7 @@ runs:
       PUBLIC_TAILS_URL: ${{ inputs.IN_PUBLIC_TAILS_URL }}
       LOG_LEVEL: warning
       NO_TTY: "1"
-    working-directory: acapy/demo
+    working-directory: ./demo
 branding:
   icon: "mic"
   color: "purple"
diff --git a/.github/actions/run-unit-tests/action.yml b/.github/actions/run-unit-tests/action.yml
new file mode 100644
index 0000000000..9675c99bc0
--- /dev/null
+++ b/.github/actions/run-unit-tests/action.yml
@@ -0,0 +1,55 @@
+name: Run Unit Tests
+description: "Run unit tests for the project"
+
+inputs:
+  python-version:
+    description: "Python version"
+    required: true
+  os:
+    description: "Operating system"
+    required: true
+  is_pr:
+    description: "Is this a PR?"
+    required: false
+    default: "true"
+
+runs:
+  using: "composite"
+  steps:
+    - name: Set up Python ${{ inputs.python-version }}
+      uses: actions/setup-python@v5
+      with:
+        python-version: ${{ inputs.python-version }}
+        cache: 'pip'
+        cache-dependency-path: 'requirements*.txt'
+    - name: Install the project dependencies
+      shell: bash
+      run: |
+        pip install poetry
+        poetry install --all-extras
+    - name: Tests
+      shell: bash
+      run: |
+        poetry run pytest --cov=aries_cloudagent --cov-report term-missing --cov-report xml --ignore-glob=/tests/* --ignore-glob=demo/* --ignore-glob=docker/* --ignore-glob=docs/* --ignore-glob=scripts/* 2>&1 | tee pytest.log
+        PYTEST_EXIT_CODE=${PIPESTATUS[0]}
+        if grep -Eq "RuntimeWarning: coroutine .* was never awaited" pytest.log; then
+          echo "Failure: Detected unawaited coroutine warning in pytest output."
+          exit 1
+        fi
+        exit $PYTEST_EXIT_CODE
+    - name: Save PR number to file
+      if: inputs.is_pr == 'true'
+      shell: bash
+      run: echo ${{ github.event.number }} > PR_NUMBER
+    - name: Archive PR number
+      if: inputs.is_pr == 'true'
+      uses: actions/upload-artifact@v4
+      with:
+        name: PR_NUMBER
+        path: PR_NUMBER
+    - name: Archive Test Results
+      if: inputs.is_pr == 'true'
+      uses: actions/upload-artifact@v4
+      with:
+        name: TEST_COV
+        path: test-reports/coverage.xml
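Editor's note on the test step above: a pipeline's exit status is that of its last command, so the `tee` would normally mask a pytest failure; `${PIPESTATUS[0]}` (a bash-only array) captures pytest's own status before the grep gate runs. A minimal standalone sketch of the same pattern, with `false` standing in for a failing test run:

    #!/usr/bin/env bash
    # The pipeline reports tee's status, hiding the failure:
    false | tee out.log
    echo "$?"                  # prints 0 -- tee succeeded even though `false` failed

    # PIPESTATUS preserves the status of each pipeline stage:
    false | tee out.log
    status=${PIPESTATUS[0]}    # 1 -- the exit code of `false`, not of `tee`

    # Gate on the captured log first, then propagate the saved status:
    if grep -Eq "RuntimeWarning: coroutine .* was never awaited" out.log; then
      exit 1
    fi
    exit "$status"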
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index cd4692b79f..67ce87084c 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -5,10 +5,230 @@ updates:
   # Maintain dependencies for GitHub Actions
   #  - Check for updates once a week
   #  - Group all updates into a single PR
-  - package-ecosystem: github-actions
-    directory: /
+  - package-ecosystem: "github-actions"
+    directory: "/"
     schedule:
-      interval: weekly
+      interval: "weekly"
     groups:
       all-actions:
-        patterns: [ "*" ]
\ No newline at end of file
+        patterns: [ "*" ]
+
+  # Maintain dependencies for Python Packages
+  - package-ecosystem: "pip"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-major"]
+
+  # Maintain dependencies for Python Packages
+  - package-ecosystem: "pip"
+    directory: "/demo/playground/examples"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-major"]
+
+  # Maintain dependencies for Python Packages
+  - package-ecosystem: "pip"
+    directory: "/demo"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-major"]
+
+  # Maintain dependencies for Python Packages
+  - package-ecosystem: "pip"
+    directory: "/docs"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-major"]
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/.devcontainer"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/docker-agent"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/docker-test/db"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/elk-stack/elasticsearch"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/elk-stack/extensions/curator"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/elk-stack/extensions/enterprise-search"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/elk-stack/extensions/filebeat"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/elk-stack/extensions/fleet"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/elk-stack/extensions/heartbeat"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/elk-stack/extensions/logspout"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/elk-stack/extensions/metricbeat"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/elk-stack/kibana"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/elk-stack/logstash"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/elk-stack/setup"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/multi-demo"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/playground"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/demo/playground/examples"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for docker
+  - package-ecosystem: "docker"
+    directory: "/docker"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
+  # Maintain dependencies for dev Container
+  - package-ecosystem: "devcontainers"
+    directory: "/.devcontainer"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "04:00"
+      timezone: "Canada/Pacific"
+
diff --git a/.github/workflows/integrationtests.yml b/.github/workflows/integrationtests.yml
index 6d2fca9349..b7e3eb8344 100644
--- a/.github/workflows/integrationtests.yml
+++ b/.github/workflows/integrationtests.yml
@@ -1,6 +1,8 @@
 name: acapy-integration-tests
 
 on:
+  schedule:
+    - cron: '0 0 * * *'
   workflow_dispatch:
   pull_request:
     branches:
@@ -18,17 +20,45 @@ jobs:
   test:
     runs-on: ubuntu-latest
     if: (github.event_name == 'pull_request' && github.repository == 'hyperledger/aries-cloudagent-python') || (github.event_name != 'pull_request')
+    outputs:
+      is_release: ${{ steps.check_if_release.outputs.is_release }}
     steps:
       - name: checkout-acapy
         uses: actions/checkout@v4
         with:
-          path: acapy
-      #- name: run-von-network
-      #  uses: ./acapy/.github/actions/run-von-network
-      #- name: run-indy-tails-server
-      #  uses: ./acapy/.github/actions/run-indy-tails-server
-      - name: run-integration-tests
-        uses: ./acapy/.github/actions/run-integration-tests
-        # to run with a specific set of tests include the following parameter:
-        # with:
-        #   TEST_SCOPE: "-t @T001-RFC0037"
@T001-RFC0037" + fetch-depth: 0 + - name: Check if PR is a release + id: check_if_release + continue-on-error: true + run: | + # Get the diff between the current commit and the last merge commit on the upstream/main branch + git remote add upstream https://github.com/hyperledger/aries-cloudagent-python.git + git fetch upstream + + last_merge=$(git rev-list --no-merges -n 1 upstream/main) + + echo event = ${{ github.event_name }} + + echo last upstream commit = "$last_merge" + echo current pr commit = "${{ github.sha }}" + + echo Will exit with code 1 if the pull request is not a release + + changes=$(git diff "${{ github.sha }}" "$last_merge" pyproject.toml) + + # Extract the version of aries-cloudagent from the diff of pyproject.toml + version=$(echo "$changes" | grep -A1 'name = "aries_cloudagent"' | head -n 2 | tail -n 1 | awk '{print $3}' | tr -d '"') + + echo "$version" + if [ "$version" ]; then + echo "This is a release because the aries-cloudagent version in pyproject.toml has changes" + echo is_release=true >> $GITHUB_OUTPUT + fi + - name: run-pr-integration-tests + uses: ./.github/actions/run-integration-tests + if: (steps.check_if_release.outputs.is_release != 'true' && github.event_name == 'pull_request') + - name: run-release-or-cron-integration-tests + if: (steps.check_if_release.outputs.is_release == 'true' || github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') + uses: ./.github/actions/run-integration-tests + with: + TEST_SCOPE: "-t @Release" diff --git a/.github/workflows/nigthly.yml b/.github/workflows/nigthly.yml index c6e01b95ce..7427d28d21 100644 --- a/.github/workflows/nigthly.yml +++ b/.github/workflows/nigthly.yml @@ -7,17 +7,22 @@ on: jobs: tests: - if: github.repository == 'hyperledger/aries-cloudagent-python' || github.event_name == 'workflow_dispatch' - name: Tests + runs-on: ubuntu-latest strategy: fail-fast: false matrix: os: ["ubuntu-latest"] python-version: ["3.9", "3.10"] - uses: ./.github/workflows/tests.yml - with: - python-version: ${{ matrix.python-version }} - os: ${{ matrix.os }} + if: github.repository == 'hyperledger/aries-cloudagent-python' || github.event_name == 'workflow_dispatch' + steps: + - name: checkout + uses: actions/checkout@v4 + - name: Tests + uses: ./.github/actions/run-unit-tests + with: + python-version: ${{ matrix.python-version }} + os: ${{ matrix.os }} + is_pr: "false" setup_and_check_pub: name: Setup Publish diff --git a/.github/workflows/pr-tests.yml b/.github/workflows/pr-tests.yml index 5de0998c36..6813b88dc0 100644 --- a/.github/workflows/pr-tests.yml +++ b/.github/workflows/pr-tests.yml @@ -9,8 +9,13 @@ concurrency: jobs: tests: - name: Tests - uses: ./.github/workflows/tests.yml - with: - python-version: "3.9" - os: "ubuntu-latest" + runs-on: ubuntu-latest + steps: + - name: checkout + uses: actions/checkout@v4 + - name: Tests + uses: ./.github/actions/run-unit-tests + with: + python-version: "3.9" + os: "ubuntu-latest" + is_pr: "true" diff --git a/.github/workflows/publish-indy.yml b/.github/workflows/publish-indy.yml deleted file mode 100644 index 3d95cc6d84..0000000000 --- a/.github/workflows/publish-indy.yml +++ /dev/null @@ -1,113 +0,0 @@ -name: Publish ACA-Py Image (Indy) -run-name: Publish ACA-Py ${{ inputs.tag || github.event.release.tag_name }} Image (Indy ${{ inputs.indy_version || '1.16.0' }}) -on: - release: - types: [published] - - workflow_dispatch: - inputs: - indy_version: - description: 'Indy SDK Version' - required: true - default: 1.16.0 - type: string - tag: - description: 'Image 
diff --git a/.github/workflows/nigthly.yml b/.github/workflows/nigthly.yml
index c6e01b95ce..7427d28d21 100644
--- a/.github/workflows/nigthly.yml
+++ b/.github/workflows/nigthly.yml
@@ -7,17 +7,22 @@ on:
 
 jobs:
   tests:
-    if: github.repository == 'hyperledger/aries-cloudagent-python' || github.event_name == 'workflow_dispatch'
-    name: Tests
+    runs-on: ubuntu-latest
     strategy:
       fail-fast: false
      matrix:
        os: ["ubuntu-latest"]
        python-version: ["3.9", "3.10"]
-    uses: ./.github/workflows/tests.yml
-    with:
-      python-version: ${{ matrix.python-version }}
-      os: ${{ matrix.os }}
+    if: github.repository == 'hyperledger/aries-cloudagent-python' || github.event_name == 'workflow_dispatch'
+    steps:
+      - name: checkout
+        uses: actions/checkout@v4
+      - name: Tests
+        uses: ./.github/actions/run-unit-tests
+        with:
+          python-version: ${{ matrix.python-version }}
+          os: ${{ matrix.os }}
+          is_pr: "false"
 
   setup_and_check_pub:
     name: Setup Publish
diff --git a/.github/workflows/pr-tests.yml b/.github/workflows/pr-tests.yml
index 5de0998c36..6813b88dc0 100644
--- a/.github/workflows/pr-tests.yml
+++ b/.github/workflows/pr-tests.yml
@@ -9,8 +9,13 @@ concurrency:
 
 jobs:
   tests:
-    name: Tests
-    uses: ./.github/workflows/tests.yml
-    with:
-      python-version: "3.9"
-      os: "ubuntu-latest"
+    runs-on: ubuntu-latest
+    steps:
+      - name: checkout
+        uses: actions/checkout@v4
+      - name: Tests
+        uses: ./.github/actions/run-unit-tests
+        with:
+          python-version: "3.9"
+          os: "ubuntu-latest"
+          is_pr: "true"
diff --git a/.github/workflows/publish-indy.yml b/.github/workflows/publish-indy.yml
deleted file mode 100644
index 3d95cc6d84..0000000000
--- a/.github/workflows/publish-indy.yml
+++ /dev/null
@@ -1,113 +0,0 @@
-name: Publish ACA-Py Image (Indy)
-run-name: Publish ACA-Py ${{ inputs.tag || github.event.release.tag_name }} Image (Indy ${{ inputs.indy_version || '1.16.0' }})
-on:
-  release:
-    types: [published]
-
-  workflow_dispatch:
-    inputs:
-      indy_version:
-        description: 'Indy SDK Version'
-        required: true
-        default: 1.16.0
-        type: string
-      tag:
-        description: 'Image tag'
-        required: true
-        type: string
-      platforms:
-        description: 'Platforms - Comma separated list of the platforms to support.'
-        required: true
-        default: linux/amd64
-        type: string
-      ref:
-        description: 'Optional - The branch, tag or SHA to checkout.'
-        required: false
-        type: string
-
-# Note:
-#  - ACA-Py with Indy SDK image builds do not include support for the linux/arm64 platform.
-#  - See notes below for details.
-
-env:
-  INDY_VERSION: ${{ inputs.indy_version || '1.16.0' }}
-
-  # Images do not include support for the linux/arm64 platform due to a known issue compiling the postgres plugin
-  #  - https://github.com/hyperledger/indy-sdk/issues/2445
-  # There is a pending PR to fix this issue here; https://github.com/hyperledger/indy-sdk/pull/2453
-  #
-  # linux/386 platform support has been disabled pending a permanent fix for https://github.com/hyperledger/aries-cloudagent-python/issues/2124
-  # PLATFORMS: ${{ inputs.platforms || 'linux/amd64,linux/386' }}
-  PLATFORMS: ${{ inputs.platforms || 'linux/amd64' }}
-
-jobs:
-  publish-image:
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: ['3.9']
-
-    name: Publish ACA-Py Image (Indy)
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout Code
-        uses: actions/checkout@v4
-        with:
-          ref: ${{ inputs.ref || '' }}
-
-      - name: Gather image info
-        id: info
-        run: |
-          echo "repo-owner=${GITHUB_REPOSITORY_OWNER,,}" >> $GITHUB_OUTPUT
-
-      - name: Cache Docker layers
-        uses: actions/cache@v4
-        with:
-          path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-${{ github.sha }}
-          restore-keys: |
-            ${{ runner.os }}-buildx-
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Log in to the GitHub Container Registry
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Setup Image Metadata
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: |
-            ghcr.io/${{ steps.info.outputs.repo-owner }}/aries-cloudagent-python
-          tags: |
-            type=raw,value=py${{ matrix.python-version }}-indy-${{ env.INDY_VERSION }}-${{ inputs.tag || github.event.release.tag_name }}
-
-      - name: Build and Push Image to ghcr.io
-        uses: docker/build-push-action@v5
-        with:
-          push: true
-          context: .
-          file: docker/Dockerfile.indy
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
-          target: main
-          build-args: |
-            python_version=${{ matrix.python-version }}
-            indy_version=${{ env.INDY_VERSION }}
-            acapy_version=${{ inputs.tag || github.event.release.tag_name }}
-          cache-from: type=local,src=/tmp/.buildx-cache
-          cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max
-          platforms: ${{ env.PLATFORMS }}
-
-      # Temp fix
-      # https://github.com/docker/build-push-action/issues/252
-      # https://github.com/moby/buildkit/issues/1896
-      - name: Move cache
-        run: |
-          rm -rf /tmp/.buildx-cache
-          mv /tmp/.buildx-cache-new /tmp/.buildx-cache
diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml
new file mode 100644
index 0000000000..37aa855c3c
--- /dev/null
+++ b/.github/workflows/scorecard.yml
@@ -0,0 +1,76 @@
+# SPDX-License-Identifier: Apache-2.0
+
+# This workflow uses actions that are not certified by GitHub. They are provided
+# by a third-party and are governed by separate terms of service, privacy
+# policy, and support documentation.
+
+name: Scorecard supply-chain security
+on:
+  workflow_dispatch:
+  # For Branch-Protection check. Only the default branch is supported. See
+  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
+  branch_protection_rule:
+  # To guarantee Maintained check is occasionally updated. See
+  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
+  schedule:
+    - cron: '17 21 * * 4'
+  push:
+    branches: [ "main" ]
+
+# Declare default permissions as read only.
+permissions: read-all
+
+jobs:
+  analysis:
+    name: Scorecard analysis
+    runs-on: ubuntu-latest
+    permissions:
+      # Needed to upload the results to code-scanning dashboard.
+      security-events: write
+      # Needed to publish results and get a badge (see publish_results below).
+      id-token: write
+      # Uncomment the permissions below if installing in a private repository.
+      # contents: read
+      # actions: read
+
+    steps:
+      - name: "Checkout code"
+        uses: actions/checkout@v4 # was v4.1.1 - b4ffde65f46336ab88eb53be808477a3936bae11
+        with:
+          persist-credentials: false
+
+      - name: "Run analysis"
+        uses: ossf/scorecard-action@v2.3.3 # was v2.3.1 - 0864cf19026789058feabb7e87baa5f140aac736
+        with:
+          results_file: results.sarif
+          results_format: sarif
+          # (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
+          # - you want to enable the Branch-Protection check on a *public* repository, or
+          # - you are installing Scorecard on a *private* repository
+          # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action?tab=readme-ov-file#authentication-with-fine-grained-pat-optional.
+          # repo_token: ${{ secrets.SCORECARD_TOKEN }}
+
+          # Public repositories:
+          #   - Publish results to OpenSSF REST API for easy access by consumers
+          #   - Allows the repository to include the Scorecard badge.
+          #   - See https://github.com/ossf/scorecard-action#publishing-results.
+          # For private repositories:
+          #   - `publish_results` will always be set to `false`, regardless
+          #     of the value entered here.
+          publish_results: true
+
+      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
+      # format to the repository Actions tab.
+      - name: "Upload artifact"
+        uses: actions/upload-artifact@v4 # was v3.pre.node20 97a0fba1372883ab732affbe8f94b823f91727db
+        with:
+          name: SARIF file
+          path: results.sarif
+          retention-days: 5
+
+      # Upload the results to GitHub's code scanning dashboard (optional).
+      # Commenting out will disable upload of results to your repo's Code Scanning dashboard
+      - name: "Upload to code-scanning"
+        uses: github/codeql-action/upload-sarif@v3 # was v3.24.9 - 1b1aada464948af03b950897e5eb522f92603cc2
+        with:
+          sarif_file: results.sarif
\ No newline at end of file
diff --git a/.github/workflows/snyk.yml b/.github/workflows/snyk.yml
index 30d997d594..57e3b01580 100644
--- a/.github/workflows/snyk.yml
+++ b/.github/workflows/snyk.yml
@@ -1,10 +1,11 @@
 name: Snyk Container
 on:
-  pull_request:
-    branches: [main]
+  push:
+    branches:
+      - main
     paths:
-      - aries_cloudagent
-      - docker
+      - aries_cloudagent/**
+      - docker/**
 
 jobs:
   snyk:
@@ -12,13 +13,15 @@ jobs:
     if: ${{ github.repository_owner == 'hyperledger' }}
     steps:
       - uses: actions/checkout@v4
+
       - name: Build a Docker image
        run: docker build -t aries-cloudagent -f docker/Dockerfile .
+
      - name: Run Snyk to check Docker image for vulnerabilities
        # Snyk can be used to break the build when it detects vulnerabilities.
        # In this case we want to upload the issues to GitHub Code Scanning
        continue-on-error: true
-        uses: snyk/actions/docker@master
+        uses: snyk/actions/docker@0.4.0
        env:
          # In order to use the Snyk Action you will need to have a Snyk API token.
          # More details in https://github.com/snyk/actions#getting-your-snyk-token
@@ -27,6 +30,14 @@ jobs:
        with:
          image: aries-cloudagent
          args: --file=docker/Dockerfile
+
+      # Replace any "null" security severity values with 0. The null value is used in the case
+      # of license-related findings, which do not indicate a security vulnerability.
+      # See https://github.com/github/codeql-action/issues/2187 for more context.
+      - name: Post process snyk sarif file
+        run: |
+          sed -i 's/"security-severity": "null"/"security-severity": "0"/g' snyk.sarif
+
       - name: Upload result to GitHub Code Scanning
         uses: github/codeql-action/upload-sarif@v3
         with:
diff --git a/.github/workflows/sonar-merge-main.yml b/.github/workflows/sonar-merge-main.yml
new file mode 100644
index 0000000000..9f00bc5f59
--- /dev/null
+++ b/.github/workflows/sonar-merge-main.yml
@@ -0,0 +1,34 @@
+name: Sonar Scan and Coverage
+on:
+  push:
+    branches:
+      - main
+
+jobs:
+  sonarcloud:
+    name: SonarCloud
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+      - name: Tests
+        uses: ./.github/actions/run-unit-tests
+        with:
+          python-version: "3.9"
+          os: "ubuntu-latest"
+          is_pr: "false"
+      - name: Adjust Test Coverage Source
+        run: |
+          # Need to change source in coverage report because it was generated from another context
+          sed -i 's/\/home\/runner\/work\/aries-cloudagent-python\/aries-cloudagent-python\//\/github\/workspace\//g' test-reports/coverage.xml
+      - name: SonarCloud Scan
+        uses: SonarSource/sonarcloud-github-action@master
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+        with:
+          args: >
+            -Dsonar.python.coverage.reportPaths=test-reports/coverage.xml
+            -Dsonar.coverage.exclusions=**/tests/*,**/demo/*,**/docs/*,**/docker/*,**/scripts/*
+            -Dsonar.sources=./
\ No newline at end of file
diff --git a/.github/workflows/sonar-pr.yml b/.github/workflows/sonar-pr.yml
new file mode 100644
index 0000000000..a88ab3e060
--- /dev/null
+++ b/.github/workflows/sonar-pr.yml
@@ -0,0 +1,73 @@
+name: Sonar Scan and Coverage
+
+on:
+  workflow_run:
+    workflows: [ PR Tests ]
+    types:
+      - completed
+
+jobs:
+  SonarCloud:
+    runs-on: ubuntu-latest
+    if: github.event.workflow_run.conclusion == 'success'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+      - name: Download PR number artifact
+        uses: dawidd6/action-download-artifact@v3
+        with:
+          workflow: Tests
+          run_id: ${{ github.event.workflow_run.id }}
+          name: PR_NUMBER
+      - name: Read PR_NUMBER
+        id: pr_number
+        uses: juliangruber/read-file-action@v1
+        with:
+          path: ./PR_NUMBER
+      - name: Download Test Coverage
+        uses: dawidd6/action-download-artifact@v3
+        with:
+          workflow: Tests
+          run_id: ${{ github.event.workflow_run.id }}
+          name: TEST_COV
+      - name: Request GitHub API for PR data
+        uses: octokit/request-action@v2.x
+        id: get_pr_data
+        with:
+          route: GET /repos/${{ github.event.repository.full_name }}/pulls/${{ steps.pr_number.outputs.content }}
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Checkout base branch
+        run: |
+          echo forked repo = ${{ fromJson(steps.get_pr_data.outputs.data).head.repo.html_url }}.git
+          echo base repo = ${{ github.event.repository.clone_url }}
+
+          git remote add upstream ${{ fromJson(steps.get_pr_data.outputs.data).head.repo.html_url }}.git
+          git fetch --all
+
+          echo pr number = ${{ fromJson(steps.get_pr_data.outputs.data).number }}
+          echo forked branch = ${{ fromJson(steps.get_pr_data.outputs.data).head.ref }}
+          echo base branch = ${{ fromJson(steps.get_pr_data.outputs.data).base.ref }}
+
+          git checkout -B temp-branch-for-scanning upstream/${{ fromJson(steps.get_pr_data.outputs.data).head.ref }}
+      - name: Move Coverage Report And Adjust Source
+        run: |
+          mkdir test-reports
+          mv coverage.xml test-reports
+          # Need to change source in coverage report because it was generated from another context
+          sed -i 's/\/home\/runner\/work\/aries-cloudagent-python\/aries-cloudagent-python\//\/github\/workspace\//g' test-reports/coverage.xml
+      - name: SonarCloud Scan
+        uses: SonarSource/sonarcloud-github-action@master
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+        with:
+          args: >
+            -Dsonar.scm.revision=${{ github.event.workflow_run.head_sha }}
+            -Dsonar.pullrequest.key=${{ fromJson(steps.get_pr_data.outputs.data).number }}
+            -Dsonar.pullrequest.branch=${{ fromJson(steps.get_pr_data.outputs.data).head.ref }}
+            -Dsonar.pullrequest.base=${{ fromJson(steps.get_pr_data.outputs.data).base.ref }}
+            -Dsonar.coverage.exclusions=**/tests/*,**/demo/*,**/docs/*,**/docker/*,**/scripts/*
+            -Dsonar.python.coverage.reportPaths=test-reports/coverage.xml
+            -Dsonar.sources=./
\ No newline at end of file
diff --git a/.github/workflows/tests-indy.yml b/.github/workflows/tests-indy.yml
deleted file mode 100644
index 8b7651a39f..0000000000
--- a/.github/workflows/tests-indy.yml
+++ /dev/null
@@ -1,58 +0,0 @@
-name: Tests (Indy)
-
-on:
-  workflow_call:
-    inputs:
-      python-version:
-        required: true
-        type: string
-      indy-version:
-        required: true
-        type: string
-      os:
-        required: true
-        type: string
-
-jobs:
-  tests:
-    name: Test Python ${{ inputs.python-version }} on Indy ${{ inputs.indy-version }}
-    runs-on: ${{ inputs.os }}
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Cache image layers
-        uses: actions/cache@v4
-        with:
-          path: /tmp/.buildx-cache-test
-          key: ${{ runner.os }}-buildx-test-${{ github.sha }}
-          restore-keys: |
-            ${{ runner.os }}-buildx-test-
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Build test image
-        uses: docker/build-push-action@v5
-        with:
-          load: true
-          context: .
-          file: docker/Dockerfile.indy
-          target: acapy-test
-          tags: acapy-test:latest
-          build-args: |
-            python_version=${{ inputs.python-version }}
-            indy_version=${{ inputs.indy-version }}
-          cache-from: type=local,src=/tmp/.buildx-cache-test
-          cache-to: type=local,dest=/tmp/.buildx-cache-test-new,mode=max
-
-      # Temp fix
-      # https://github.com/docker/build-push-action/issues/252
-      # https://github.com/moby/buildkit/issues/1896
-      - name: Move cache
-        run: |
-          rm -rf /tmp/.buildx-cache-test
-          mv /tmp/.buildx-cache-test-new /tmp/.buildx-cache-test
-
-      - name: Run pytest
-        run: |
-          docker run --rm acapy-test:latest
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
deleted file mode 100644
index 62699408a3..0000000000
--- a/.github/workflows/tests.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-name: Tests
-
-on:
-  workflow_call:
-    inputs:
-      python-version:
-        required: true
-        type: string
-      os:
-        required: true
-        type: string
-
-jobs:
-  tests:
-    name: Test Python ${{ inputs.python-version }}
-    runs-on: ${{ inputs.os }}
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python ${{ inputs.python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ inputs.python-version }}
-          cache: 'pip'
-          cache-dependency-path: 'requirements*.txt'
-      - name: Install the project dependencies
-        run: |
-          pip install poetry
-          poetry install -E "askar bbs"
-      - name: Tests
-        run: |
-          poetry run pytest 2>&1 | tee pytest.log
-          PYTEST_EXIT_CODE=${PIPESTATUS[0]}
-          if grep -Eq "RuntimeWarning: coroutine .* was never awaited" pytest.log; then
-            echo "Failure: Detected unawaited coroutine warning in pytest output."
-            exit 1
-          fi
-          exit $PYTEST_EXIT_CODE
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 038ab3fb1f..0901419505 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/alessandrojcm/commitlint-pre-commit-hook
-    rev: v9.5.0
+    rev: v9.16.0
     hooks:
       - id: commitlint
         stages: [commit-msg]
@@ -8,13 +8,13 @@ repos:
         additional_dependencies: ['@commitlint/config-conventional']
   - repo: https://github.com/psf/black
     # Ensure this is synced with pyproject.toml
-    rev: 24.1.1
+    rev: 24.4.2
     hooks:
       - id: black
         stages: [commit]
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ensure this is synced with pyproject.toml
-    rev: v0.1.2
+    rev: v0.4.4
     hooks:
       - id: ruff
         stages: [commit]
diff --git a/README.md b/README.md
index 4077f04249..d25d2b5814 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,6 @@
 # Hyperledger Aries Cloud Agent - Python
 
 [![pypi releases](https://img.shields.io/pypi/v/aries_cloudagent)](https://pypi.org/project/aries-cloudagent/)
-[![codecov](https://codecov.io/gh/hyperledger/aries-cloudagent-python/branch/main/graph/badge.svg)](https://codecov.io/gh/hyperledger/aries-cloudagent-python)
diff --git a/aries_cloudagent/admin/decorators/auth.py b/aries_cloudagent/admin/decorators/auth.py
new file mode 100644
index 0000000000..818f297d40
--- /dev/null
+++ b/aries_cloudagent/admin/decorators/auth.py
@@ -0,0 +1,80 @@
+"""Authentication decorators for the admin API."""
+
+import functools
+
+from aiohttp import web
+
+from ...utils import general as general_utils
+from ..request_context import AdminRequestContext
+
+
+def admin_authentication(handler):
+    """Decorator to require authentication via admin API key.
+
+    The decorator will check for a valid x-api-key header and
+    reject the request if it is missing or invalid.
+    If the agent is running in insecure mode, the request will be allowed without a key.
+ """ + + @functools.wraps(handler) + async def admin_auth(request): + context: AdminRequestContext = request["context"] + profile = context.profile + header_admin_api_key = request.headers.get("x-api-key") + valid_key = general_utils.const_compare( + profile.settings.get("admin.admin_api_key"), header_admin_api_key + ) + insecure_mode = bool(profile.settings.get("admin.admin_insecure_mode")) + + # We have to allow OPTIONS method access to paths without a key since + # browsers performing CORS requests will never include the original + # x-api-key header from the method that triggered the preflight + # OPTIONS check. + if insecure_mode or valid_key or (request.method == "OPTIONS"): + return await handler(request) + else: + raise web.HTTPUnauthorized( + reason="API Key invalid or missing", + text="API Key invalid or missing", + ) + + return admin_auth + + +def tenant_authentication(handler): + """Decorator to enable non-admin authentication. + + The decorator will: + - check for a valid bearer token in the Autorization header if running + in multi-tenant mode + - check for a valid x-api-key header if running in single-tenant mode + """ + + @functools.wraps(handler) + async def tenant_auth(request): + context: AdminRequestContext = request["context"] + profile = context.profile + authorization_header = request.headers.get("Authorization") + header_admin_api_key = request.headers.get("x-api-key") + valid_key = general_utils.const_compare( + profile.settings.get("admin.admin_api_key"), header_admin_api_key + ) + insecure_mode = bool(profile.settings.get("admin.admin_insecure_mode")) + multitenant_enabled = profile.settings.get("multitenant.enabled") + + # CORS fix: allow OPTIONS method access to paths without a token + if ( + (multitenant_enabled and authorization_header) + or (not multitenant_enabled and valid_key) + or insecure_mode + or request.method == "OPTIONS" + ): + return await handler(request) + else: + auth_mode = "Authorization token" if multitenant_enabled else "API key" + raise web.HTTPUnauthorized( + reason=f"{auth_mode} missing or invalid", + text=f"{auth_mode} missing or invalid", + ) + + return tenant_auth diff --git a/aries_cloudagent/admin/request_context.py b/aries_cloudagent/admin/request_context.py index 159334c685..215a64f3bb 100644 --- a/aries_cloudagent/admin/request_context.py +++ b/aries_cloudagent/admin/request_context.py @@ -21,13 +21,13 @@ def __init__( self, profile: Profile, *, - context: InjectionContext = None, - settings: Mapping[str, object] = None, - root_profile: Profile = None, - metadata: dict = None + context: Optional[InjectionContext] = None, + settings: Optional[Mapping[str, object]] = None, + root_profile: Optional[Profile] = None, + metadata: Optional[dict] = None ): """Initialize an instance of AdminRequestContext.""" - self._context = (context or profile.context).start_scope("admin", settings) + self._context = (context or profile.context).start_scope(settings) self._profile = profile self._root_profile = root_profile self._metadata = metadata @@ -72,7 +72,7 @@ def transaction(self) -> ProfileSession: def inject( self, base_cls: Type[InjectType], - settings: Mapping[str, object] = None, + settings: Optional[Mapping[str, object]] = None, ) -> InjectType: """Get the provided instance of a given class identifier. 
diff --git a/aries_cloudagent/admin/routes.py b/aries_cloudagent/admin/routes.py
new file mode 100644
index 0000000000..62abab842a
--- /dev/null
+++ b/aries_cloudagent/admin/routes.py
@@ -0,0 +1,232 @@
+"""Admin server routes."""
+
+import asyncio
+import re
+
+from aiohttp import web
+from aiohttp_apispec import (
+    docs,
+    response_schema,
+)
+from marshmallow import fields
+
+from ..core.plugin_registry import PluginRegistry
+from ..messaging.models.openapi import OpenAPISchema
+from ..utils.stats import Collector
+from ..version import __version__
+from .decorators.auth import admin_authentication
+
+
+class AdminModulesSchema(OpenAPISchema):
+    """Schema for the modules endpoint."""
+
+    result = fields.List(
+        fields.Str(metadata={"description": "admin module"}),
+        metadata={"description": "List of admin modules"},
+    )
+
+
+class AdminConfigSchema(OpenAPISchema):
+    """Schema for the config endpoint."""
+
+    config = fields.Dict(
+        required=True, metadata={"description": "Configuration settings"}
+    )
+
+
+class AdminStatusSchema(OpenAPISchema):
+    """Schema for the status endpoint."""
+
+    version = fields.Str(metadata={"description": "Version code"})
+    label = fields.Str(allow_none=True, metadata={"description": "Default label"})
+    timing = fields.Dict(required=False, metadata={"description": "Timing results"})
+    conductor = fields.Dict(
+        required=False, metadata={"description": "Conductor statistics"}
+    )
+
+
+class AdminResetSchema(OpenAPISchema):
+    """Schema for the reset endpoint."""
+
+
+class AdminStatusLivelinessSchema(OpenAPISchema):
+    """Schema for the liveliness endpoint."""
+
+    alive = fields.Boolean(
+        metadata={"description": "Liveliness status", "example": True}
+    )
+
+
+class AdminStatusReadinessSchema(OpenAPISchema):
+    """Schema for the readiness endpoint."""
+
+    ready = fields.Boolean(
+        metadata={"description": "Readiness status", "example": True}
+    )
+
+
+class AdminShutdownSchema(OpenAPISchema):
+    """Response schema for admin Module."""
+
+
+@docs(tags=["server"], summary="Fetch the list of loaded plugins")
+@response_schema(AdminModulesSchema(), 200, description="")
+@admin_authentication
+async def plugins_handler(request: web.BaseRequest):
+    """Request handler for the loaded plugins list.
+
+    Args:
+        request: aiohttp request object
+
+    Returns:
+        The module list response
+
+    """
+    registry = request.app["context"].inject_or(PluginRegistry)
+    plugins = registry and sorted(registry.plugin_names) or []
+    return web.json_response({"result": plugins})
+
+
+@docs(tags=["server"], summary="Fetch the server configuration")
+@response_schema(AdminConfigSchema(), 200, description="")
+@admin_authentication
+async def config_handler(request: web.BaseRequest):
+    """Request handler for the server configuration.
+
+    Args:
+        request: aiohttp request object
+
+    Returns:
+        The web response
+
+    """
+    config = {
+        k: (
+            request.app["context"].settings[k]
+            if (isinstance(request.app["context"].settings[k], (str, int)))
+            else request.app["context"].settings[k].copy()
+        )
+        for k in request.app["context"].settings
+        if k
+        not in [
+            "admin.admin_api_key",
+            "multitenant.jwt_secret",
+            "wallet.key",
+            "wallet.rekey",
+            "wallet.seed",
+            "wallet.storage_creds",
+        ]
+    }
+    for index in range(len(config.get("admin.webhook_urls", []))):
+        config["admin.webhook_urls"][index] = re.sub(
+            r"#.*",
+            "",
+            config["admin.webhook_urls"][index],
+        )
+
+    return web.json_response({"config": config})
+
+
+@docs(tags=["server"], summary="Fetch the server status")
+@response_schema(AdminStatusSchema(), 200, description="")
+@admin_authentication
+async def status_handler(request: web.BaseRequest):
+    """Request handler for the server status information.
+
+    Args:
+        request: aiohttp request object
+
+    Returns:
+        The web response
+
+    """
+    status = {"version": __version__}
+    status["label"] = request.app["context"].settings.get("default_label")
+    collector = request.app["context"].inject_or(Collector)
+    if collector:
+        status["timing"] = collector.results
+    if request.app["conductor_stats"]:
+        status["conductor"] = await request.app["conductor_stats"]()
+    return web.json_response(status)
+
+
+@docs(tags=["server"], summary="Reset statistics")
+@response_schema(AdminResetSchema(), 200, description="")
+@admin_authentication
+async def status_reset_handler(request: web.BaseRequest):
+    """Request handler for resetting the timing statistics.
+
+    Args:
+        request: aiohttp request object
+
+    Returns:
+        The web response
+
+    """
+    collector = request.app["context"].inject_or(Collector)
+    if collector:
+        collector.reset()
+    return web.json_response({})
+
+
+async def redirect_handler(request: web.BaseRequest):
+    """Perform redirect to documentation."""
+    raise web.HTTPFound("/api/doc")
+
+
+@docs(tags=["server"], summary="Liveliness check")
+@response_schema(AdminStatusLivelinessSchema(), 200, description="")
+async def liveliness_handler(request: web.BaseRequest):
+    """Request handler for liveliness check.
+
+    Args:
+        request: aiohttp request object
+
+    Returns:
+        The web response, always indicating True
+
+    """
+    app_live = request.app._state["alive"]
+    if app_live:
+        return web.json_response({"alive": app_live})
+    else:
+        raise web.HTTPServiceUnavailable(reason="Service not available")
+
+
+@docs(tags=["server"], summary="Readiness check")
+@response_schema(AdminStatusReadinessSchema(), 200, description="")
+async def readiness_handler(request: web.BaseRequest):
+    """Request handler for readiness check.
+
+    Args:
+        request: aiohttp request object
+
+    Returns:
+        The web response, indicating readiness for further calls
+
+    """
+    app_ready = request.app._state["ready"] and request.app._state["alive"]
+    if app_ready:
+        return web.json_response({"ready": app_ready})
+    else:
+        raise web.HTTPServiceUnavailable(reason="Service not ready")
+
+
+@docs(tags=["server"], summary="Shut down server")
+@response_schema(AdminShutdownSchema(), description="")
+@admin_authentication
+async def shutdown_handler(request: web.BaseRequest):
+    """Request handler for server shutdown.
+
+    Args:
+        request: aiohttp request object
+
+    Returns:
+        The web response (empty production)
+
+    """
+    request.app._state["ready"] = False
+    loop = asyncio.get_event_loop()
+    asyncio.ensure_future(request.app["conductor_stop"](), loop=loop)
+
+    return web.json_response({})
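Editor's note: in the module above, only `plugins_handler`, `config_handler`, `status_handler`, `status_reset_handler`, and `shutdown_handler` are wrapped in `@admin_authentication`; the liveliness and readiness probes stay open so orchestrators can poll them without credentials. A client-side sketch (the port and key are illustrative assumptions, not values from this PR):

    import asyncio

    import aiohttp


    async def main():
        # Assumes an agent started with an admin server on localhost:8031
        # and an admin API key of "my-key".
        async with aiohttp.ClientSession() as session:
            # Probes are unauthenticated by design.
            async with session.get("http://localhost:8031/status/live") as resp:
                print(await resp.json())   # {"alive": true}

            # /status requires the x-api-key header.
            headers = {"x-api-key": "my-key"}
            async with session.get(
                "http://localhost:8031/status", headers=headers
            ) as resp:
                print(resp.status)         # 200; 401 without the header


    asyncio.run(main())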
diff --git a/aries_cloudagent/admin/server.py b/aries_cloudagent/admin/server.py
index c5fb4dc516..eb755c14a6 100644
--- a/aries_cloudagent/admin/server.py
+++ b/aries_cloudagent/admin/server.py
@@ -3,23 +3,17 @@
 import asyncio
 import logging
 import re
-import uuid
 import warnings
 import weakref
-from hmac import compare_digest
 from typing import Callable, Coroutine, Optional, Pattern, Sequence, cast
 
 import aiohttp_cors
 import jwt
 from aiohttp import web
-from aiohttp_apispec import (
-    docs,
-    response_schema,
-    setup_aiohttp_apispec,
-    validation_middleware,
-)
+from aiohttp_apispec import setup_aiohttp_apispec, validation_middleware
+from uuid_utils import uuid4
 
-from marshmallow import fields
+from aries_cloudagent.wallet import singletons
 
 from ..config.injection_context import InjectionContext
 from ..config.logging import context_wallet_id
@@ -27,20 +21,32 @@
 from ..core.plugin_registry import PluginRegistry
 from ..core.profile import Profile
 from ..ledger.error import LedgerConfigError, LedgerTransactionError
-from ..messaging.models.openapi import OpenAPISchema
 from ..messaging.responder import BaseResponder
-from ..messaging.valid import UUIDFour
 from ..multitenant.base import BaseMultitenantManager, MultitenantManagerError
+from ..storage.base import BaseStorage
 from ..storage.error import StorageNotFoundError
+from ..storage.type import RECORD_TYPE_ACAPY_UPGRADING
 from ..transport.outbound.message import OutboundMessage
 from ..transport.outbound.status import OutboundSendStatus
 from ..transport.queue.basic import BasicMessageQueue
+from ..utils import general as general_utils
 from ..utils.stats import Collector
 from ..utils.task_queue import TaskQueue
 from ..version import __version__
+from ..wallet.anoncreds_upgrade import check_upgrade_completion_loop
 from .base_server import BaseAdminServer
 from .error import AdminSetupError
 from .request_context import AdminRequestContext
+from .routes import (
+    config_handler,
+    liveliness_handler,
+    plugins_handler,
+    readiness_handler,
+    redirect_handler,
+    shutdown_handler,
+    status_handler,
+    status_reset_handler,
+)
 
 LOGGER = logging.getLogger(__name__)
@@ -58,57 +64,8 @@
     "acapy::keylist::updated": "keylist",
 }
 
-
-class AdminModulesSchema(OpenAPISchema):
-    """Schema for the modules endpoint."""
-
-    result = fields.List(
-        fields.Str(metadata={"description": "admin module"}),
-        metadata={"description": "List of admin modules"},
-    )
-
-
-class AdminConfigSchema(OpenAPISchema):
-    """Schema for the config endpoint."""
-
-    config = fields.Dict(
-        required=True, metadata={"description": "Configuration settings"}
-    )
-
-
-class AdminStatusSchema(OpenAPISchema):
-    """Schema for the status endpoint."""
-
-    version = fields.Str(metadata={"description": "Version code"})
-    label = fields.Str(allow_none=True, metadata={"description": "Default label"})
-    timing = fields.Dict(required=False, metadata={"description": "Timing results"})
-    conductor = fields.Dict(
-        required=False, metadata={"description": "Conductor statistics"}
-    )
-
-
-class AdminResetSchema(OpenAPISchema):
-    """Schema for the reset endpoint."""
-
-
-class AdminStatusLivelinessSchema(OpenAPISchema):
-    """Schema for the liveliness endpoint."""
-
-    alive = fields.Boolean(
-        metadata={"description": "Liveliness status", "example": True}
status", "example": True} - ) - - -class AdminStatusReadinessSchema(OpenAPISchema): - """Schema for the readiness endpoint.""" - - ready = fields.Boolean( - metadata={"description": "Readiness status", "example": True} - ) - - -class AdminShutdownSchema(OpenAPISchema): - """Response schema for admin Module.""" +anoncreds_wallets = singletons.IsAnoncredsSingleton().wallets +in_progress_upgrades = singletons.UpgradeInProgressSingleton() class AdminResponder(BaseResponder): @@ -205,6 +162,40 @@ async def ready_middleware(request: web.BaseRequest, handler: Coroutine): raise web.HTTPServiceUnavailable(reason="Shutdown in progress") +@web.middleware +async def upgrade_middleware(request: web.BaseRequest, handler: Coroutine): + """Blocking middleware for upgrades.""" + context: AdminRequestContext = request["context"] + + # Already upgraded + if context.profile.name in anoncreds_wallets: + return await handler(request) + + # Upgrade in progress + if context.profile.name in in_progress_upgrades.wallets: + raise web.HTTPServiceUnavailable(reason="Upgrade in progress") + + # Avoid try/except in middleware with find_all_records + upgrade_initiated = [] + async with context.profile.session() as session: + storage = session.inject(BaseStorage) + upgrade_initiated = await storage.find_all_records(RECORD_TYPE_ACAPY_UPGRADING) + if upgrade_initiated: + # If we get here, than another instance started an upgrade + # We need to check for completion (or fail) in another process + in_progress_upgrades.set_wallet(context.profile.name) + is_subwallet = context.metadata and "wallet_id" in context.metadata + asyncio.create_task( + check_upgrade_completion_loop( + context.profile, + is_subwallet, + ) + ) + raise web.HTTPServiceUnavailable(reason="Upgrade in progress") + + return await handler(request) + + @web.middleware async def debug_middleware(request: web.BaseRequest, handler: Coroutine): """Show request detail in debug log.""" @@ -218,13 +209,6 @@ async def debug_middleware(request: web.BaseRequest, handler: Coroutine): return await handler(request) -def const_compare(string1, string2): - """Compare two strings in constant time.""" - if string1 is None or string2 is None: - return False - return compare_digest(string1.encode(), string2.encode()) - - class AdminServer(BaseAdminServer): """Admin HTTP server class.""" @@ -272,8 +256,6 @@ def __init__( self.multitenant_manager = context.inject_or(BaseMultitenantManager) self._additional_route_pattern: Optional[Pattern] = None - self.server_paths = [] - @property def additional_routes_pattern(self) -> Optional[Pattern]: """Pattern for configured additional routes to permit base wallet to access.""" @@ -306,87 +288,8 @@ async def make_application(self) -> web.Application: # we check here. 
@@ -272,8 +256,6 @@ def __init__(
         self.multitenant_manager = context.inject_or(BaseMultitenantManager)
         self._additional_route_pattern: Optional[Pattern] = None
 
-        self.server_paths = []
-
     @property
     def additional_routes_pattern(self) -> Optional[Pattern]:
         """Pattern for configured additional routes to permit base wallet to access."""
@@ -306,87 +288,8 @@ async def make_application(self) -> web.Application:
         # we check here.
         assert self.admin_insecure_mode ^ bool(self.admin_api_key)
 
-        def is_unprotected_path(path: str):
-            return path in [
-                "/api/doc",
-                "/api/docs/swagger.json",
-                "/favicon.ico",
-                "/ws",  # ws handler checks authentication
-                "/status/live",
-                "/status/ready",
-            ] or path.startswith("/static/swagger/")
-
-        # If admin_api_key is None, then admin_insecure_mode must be set so
-        # we can safely enable the admin server with no security
-        if self.admin_api_key:
-
-            @web.middleware
-            async def check_token(request: web.Request, handler):
-                header_admin_api_key = request.headers.get("x-api-key")
-                valid_key = const_compare(self.admin_api_key, header_admin_api_key)
-
-                # We have to allow OPTIONS method access to paths without a key since
-                # browsers performing CORS requests will never include the original
-                # x-api-key header from the method that triggered the preflight
-                # OPTIONS check.
-                if (
-                    valid_key
-                    or is_unprotected_path(request.path)
-                    or (request.method == "OPTIONS")
-                ):
-                    return await handler(request)
-                else:
-                    raise web.HTTPUnauthorized()
-
-            middlewares.append(check_token)
-
         collector = self.context.inject_or(Collector)
 
-        if self.multitenant_manager:
-
-            @web.middleware
-            async def check_multitenant_authorization(request: web.Request, handler):
-                authorization_header = request.headers.get("Authorization")
-                path = request.path
-
-                is_multitenancy_path = path.startswith("/multitenancy")
-                is_server_path = path in self.server_paths or path == "/features"
-
-                # subwallets are not allowed to access multitenancy routes
-                if authorization_header and is_multitenancy_path:
-                    raise web.HTTPUnauthorized()
-
-                base_limited_access_path = (
-                    re.match(
-                        f"^/connections/(?:receive-invitation|{UUIDFour.PATTERN})", path
-                    )
-                    or path.startswith("/out-of-band/receive-invitation")
-                    or path.startswith("/mediation/requests/")
-                    or re.match(
-                        f"/mediation/(?:request/{UUIDFour.PATTERN}|"
-                        f"{UUIDFour.PATTERN}/default-mediator)",
-                        path,
-                    )
-                    or path.startswith("/mediation/default-mediator")
-                    or self._matches_additional_routes(path)
-                )
-
-                # base wallet is not allowed to perform ssi related actions.
-                # Only multitenancy and general server actions
-                if (
-                    not authorization_header
-                    and not is_multitenancy_path
-                    and not is_server_path
-                    and not is_unprotected_path(path)
-                    and not base_limited_access_path
-                    and not (request.method == "OPTIONS")  # CORS fix
-                ):
-                    raise web.HTTPUnauthorized()
-
-                return await handler(request)
-
-            middlewares.append(check_multitenant_authorization)
-
         @web.middleware
         async def setup_context(request: web.Request, handler):
             authorization_header = request.headers.get("Authorization")
@@ -453,6 +356,9 @@ async def setup_context(request: web.Request, handler):
 
         middlewares.append(setup_context)
 
+        # Upgrade middleware needs the context setup
+        middlewares.append(upgrade_middleware)
+
         # Register validation_middleware last avoiding unauthorized validations
         middlewares.append(validation_middleware)
 
@@ -466,19 +372,16 @@ async def setup_context(request: web.Request, handler):
         )
 
         server_routes = [
-            web.get("/", self.redirect_handler, allow_head=True),
-            web.get("/plugins", self.plugins_handler, allow_head=False),
-            web.get("/status", self.status_handler, allow_head=False),
-            web.get("/status/config", self.config_handler, allow_head=False),
-            web.post("/status/reset", self.status_reset_handler),
-            web.get("/status/live", self.liveliness_handler, allow_head=False),
-            web.get("/status/ready", self.readiness_handler, allow_head=False),
-            web.get("/shutdown", self.shutdown_handler, allow_head=False),
+            web.get("/", redirect_handler, allow_head=True),
+            web.get("/plugins", plugins_handler, allow_head=False),
+            web.get("/status", status_handler, allow_head=False),
+            web.get("/status/config", config_handler, allow_head=False),
+            web.post("/status/reset", status_reset_handler),
+            web.get("/status/live", liveliness_handler, allow_head=False),
+            web.get("/status/ready", readiness_handler, allow_head=False),
+            web.get("/shutdown", shutdown_handler, allow_head=False),
             web.get("/ws", self.websocket_handler, allow_head=False),
         ]
-
-        # Store server_paths for multitenant authorization handling
-        self.server_paths = [route.path for route in server_routes]
         app.add_routes(server_routes)
 
         plugin_registry = self.context.inject_or(PluginRegistry)
@@ -511,6 +414,11 @@ async def setup_context(request: web.Request, handler):
         app._state["ready"] = False
         app._state["alive"] = False
 
+        # set global-like variables
+        app["context"] = self.context
+        app["conductor_stats"] = self.conductor_stats
+        app["conductor_stop"] = self.conductor_stop
+
         return app
 
     async def start(self) -> None:
@@ -583,6 +491,10 @@ def sort_dict(raw: dict) -> dict:
 
     async def stop(self) -> None:
         """Stop the webserver."""
+        # Stopped before admin server is created
+        if not self.app:
+            return
+
         self.app._state["ready"] = False  # in case call does not come through OpenAPI
         for queue in self.websocket_queues.values():
             queue.stop()
@@ -622,156 +534,6 @@ async def on_startup(self, app: web.Application):
             swagger["securityDefinitions"] = security_definitions
             swagger["security"] = security
 
-    @docs(tags=["server"], summary="Fetch the list of loaded plugins")
-    @response_schema(AdminModulesSchema(), 200, description="")
-    async def plugins_handler(self, request: web.BaseRequest):
-        """Request handler for the loaded plugins list.
-
-        Args:
-            request: aiohttp request object
-
-        Returns:
-            The module list response
-
-        """
-        registry = self.context.inject_or(PluginRegistry)
-        plugins = registry and sorted(registry.plugin_names) or []
-        return web.json_response({"result": plugins})
-
-    @docs(tags=["server"], summary="Fetch the server configuration")
-    @response_schema(AdminConfigSchema(), 200, description="")
-    async def config_handler(self, request: web.BaseRequest):
-        """Request handler for the server configuration.
-
-        Args:
-            request: aiohttp request object
-
-        Returns:
-            The web response
-
-        """
-        config = {
-            k: (
-                self.context.settings[k]
-                if (isinstance(self.context.settings[k], (str, int)))
-                else self.context.settings[k].copy()
-            )
-            for k in self.context.settings
-            if k
-            not in [
-                "admin.admin_api_key",
-                "multitenant.jwt_secret",
-                "wallet.key",
-                "wallet.rekey",
-                "wallet.seed",
-                "wallet.storage_creds",
-            ]
-        }
-        for index in range(len(config.get("admin.webhook_urls", []))):
-            config["admin.webhook_urls"][index] = re.sub(
-                r"#.*",
-                "",
-                config["admin.webhook_urls"][index],
-            )
-
-        return web.json_response({"config": config})
-
-    @docs(tags=["server"], summary="Fetch the server status")
-    @response_schema(AdminStatusSchema(), 200, description="")
-    async def status_handler(self, request: web.BaseRequest):
-        """Request handler for the server status information.
-
-        Args:
-            request: aiohttp request object
-
-        Returns:
-            The web response
-
-        """
-        status = {"version": __version__}
-        status["label"] = self.context.settings.get("default_label")
-        collector = self.context.inject_or(Collector)
-        if collector:
-            status["timing"] = collector.results
-        if self.conductor_stats:
-            status["conductor"] = await self.conductor_stats()
-        return web.json_response(status)
-
-    @docs(tags=["server"], summary="Reset statistics")
-    @response_schema(AdminResetSchema(), 200, description="")
-    async def status_reset_handler(self, request: web.BaseRequest):
-        """Request handler for resetting the timing statistics.
-
-        Args:
-            request: aiohttp request object
-
-        Returns:
-            The web response
-
-        """
-        collector = self.context.inject_or(Collector)
-        if collector:
-            collector.reset()
-        return web.json_response({})
-
-    async def redirect_handler(self, request: web.BaseRequest):
-        """Perform redirect to documentation."""
-        raise web.HTTPFound("/api/doc")
-
-    @docs(tags=["server"], summary="Liveliness check")
-    @response_schema(AdminStatusLivelinessSchema(), 200, description="")
-    async def liveliness_handler(self, request: web.BaseRequest):
-        """Request handler for liveliness check.
-
-        Args:
-            request: aiohttp request object
-
-        Returns:
-            The web response, always indicating True
-
-        """
-        app_live = self.app._state["alive"]
-        if app_live:
-            return web.json_response({"alive": app_live})
-        else:
-            raise web.HTTPServiceUnavailable(reason="Service not available")
-
-    @docs(tags=["server"], summary="Readiness check")
-    @response_schema(AdminStatusReadinessSchema(), 200, description="")
-    async def readiness_handler(self, request: web.BaseRequest):
-        """Request handler for liveliness check.
-
-        Args:
-            request: aiohttp request object
-
-        Returns:
-            The web response, indicating readiness for further calls
-
-        """
-        app_ready = self.app._state["ready"] and self.app._state["alive"]
-        if app_ready:
-            return web.json_response({"ready": app_ready})
-        else:
-            raise web.HTTPServiceUnavailable(reason="Service not ready")
-
-    @docs(tags=["server"], summary="Shut down server")
-    @response_schema(AdminShutdownSchema(), description="")
-    async def shutdown_handler(self, request: web.BaseRequest):
-        """Request handler for server shutdown.
-
-        Args:
-            request: aiohttp request object
-
-        Returns:
-            The web response (empty production)
-
-        """
-        self.app._state["ready"] = False
-        loop = asyncio.get_event_loop()
-        asyncio.ensure_future(self.conductor_stop(), loop=loop)
-
-        return web.json_response({})
-
     def notify_fatal_error(self):
         """Set our readiness flags to force a restart (openshift)."""
         LOGGER.error("Received shutdown request notify_fatal_error()")
@@ -783,7 +545,7 @@ async def websocket_handler(self, request):
         ws = web.WebSocketResponse()
         await ws.prepare(request)
-        socket_id = str(uuid.uuid4())
+        socket_id = str(uuid4())
         queue = BasicMessageQueue()
         loop = asyncio.get_event_loop()
@@ -793,7 +555,7 @@ async def websocket_handler(self, request):
             else:
                 header_admin_api_key = request.headers.get("x-api-key")
                 # authenticated via http header?
-                queue.authenticated = const_compare(
+                queue.authenticated = general_utils.const_compare(
                     header_admin_api_key, self.admin_api_key
                 )
@@ -838,7 +600,7 @@ async def websocket_handler(self, request):
                             LOGGER.exception(
                                 "Exception in websocket receiving task:"
                             )
-                    if self.admin_api_key and const_compare(
+                    if self.admin_api_key and general_utils.const_compare(
                         self.admin_api_key, msg_api_key
                     ):
                         # authenticated via websocket message
diff --git a/aries_cloudagent/admin/tests/test_admin_server.py b/aries_cloudagent/admin/tests/test_admin_server.py
index 300e82f758..24c8ebfe6c 100644
--- a/aries_cloudagent/admin/tests/test_admin_server.py
+++ b/aries_cloudagent/admin/tests/test_admin_server.py
@@ -1,22 +1,28 @@
 import gc
 import json
+from unittest import IsolatedAsyncioTestCase
 
 import pytest
-from aries_cloudagent.tests import mock
-from unittest import IsolatedAsyncioTestCase
 from aiohttp import ClientSession, DummyCookieJar, TCPConnector, web
 from aiohttp.test_utils import unused_port
 
+from aries_cloudagent.tests import mock
+from aries_cloudagent.wallet import singletons
+
 from ...config.default_context import DefaultContextBuilder
 from ...config.injection_context import InjectionContext
 from ...core.event_bus import Event
+from ...core.goal_code_registry import GoalCodeRegistry
 from ...core.in_memory import InMemoryProfile
 from ...core.protocol_registry import ProtocolRegistry
-from ...core.goal_code_registry import GoalCodeRegistry
+from ...storage.base import BaseStorage
+from ...storage.record import StorageRecord
+from ...storage.type import RECORD_TYPE_ACAPY_UPGRADING
 from ...utils.stats import Collector
 from ...utils.task_queue import TaskQueue
-
+from ...wallet.anoncreds_upgrade import UPGRADING_RECORD_IN_PROGRESS
 from .. import server as test_module
import server as test_module +from ..request_context import AdminRequestContext from ..server import AdminServer, AdminSetupError @@ -119,7 +125,7 @@ def get_admin_server( collector = Collector() context.injector.bind_instance(test_module.Collector, collector) - profile = InMemoryProfile.test_profile() + profile = InMemoryProfile.test_profile(settings=settings) self.port = unused_port() return AdminServer( @@ -190,105 +196,6 @@ async def test_import_routes(self): server = self.get_admin_server({"admin.admin_insecure_mode": True}, context) app = await server.make_application() - async def test_import_routes_multitenant_middleware(self): - # imports all default admin routes - context = InjectionContext( - settings={"multitenant.base_wallet_routes": ["/test"]} - ) - context.injector.bind_instance(ProtocolRegistry, ProtocolRegistry()) - context.injector.bind_instance(GoalCodeRegistry, GoalCodeRegistry()) - context.injector.bind_instance( - test_module.BaseMultitenantManager, - mock.MagicMock(spec=test_module.BaseMultitenantManager), - ) - await DefaultContextBuilder().load_plugins(context) - server = self.get_admin_server( - { - "admin.admin_insecure_mode": False, - "admin.admin_api_key": "test-api-key", - }, - context, - ) - - # cover multitenancy start code - app = await server.make_application() - app["swagger_dict"] = {} - await server.on_startup(app) - - # multitenant authz - [mt_authz_middle] = [ - m for m in app.middlewares if ".check_multitenant_authorization" in str(m) - ] - - mock_request = mock.MagicMock( - method="GET", - headers={"Authorization": "Bearer ..."}, - path="/multitenancy/etc", - text=mock.CoroutineMock(return_value="abc123"), - ) - with self.assertRaises(test_module.web.HTTPUnauthorized): - await mt_authz_middle(mock_request, None) - - mock_request = mock.MagicMock( - method="GET", - headers={}, - path="/protected/non-multitenancy/non-server", - text=mock.CoroutineMock(return_value="abc123"), - ) - with self.assertRaises(test_module.web.HTTPUnauthorized): - await mt_authz_middle(mock_request, None) - - mock_request = mock.MagicMock( - method="GET", - headers={"Authorization": "Bearer ..."}, - path="/protected/non-multitenancy/non-server", - text=mock.CoroutineMock(return_value="abc123"), - ) - mock_handler = mock.CoroutineMock() - await mt_authz_middle(mock_request, mock_handler) - mock_handler.assert_called_once_with(mock_request) - - mock_request = mock.MagicMock( - method="GET", - headers={"Authorization": "Non-bearer ..."}, - path="/test", - text=mock.CoroutineMock(return_value="abc123"), - ) - mock_handler = mock.CoroutineMock() - await mt_authz_middle(mock_request, mock_handler) - mock_handler.assert_called_once_with(mock_request) - - # multitenant setup context exception paths - [setup_ctx_middle] = [m for m in app.middlewares if ".setup_context" in str(m)] - - mock_request = mock.MagicMock( - method="GET", - headers={"Authorization": "Non-bearer ..."}, - path="/protected/non-multitenancy/non-server", - text=mock.CoroutineMock(return_value="abc123"), - ) - with self.assertRaises(test_module.web.HTTPUnauthorized): - await setup_ctx_middle(mock_request, None) - - mock_request = mock.MagicMock( - method="GET", - headers={"Authorization": "Bearer ..."}, - path="/protected/non-multitenancy/non-server", - text=mock.CoroutineMock(return_value="abc123"), - ) - with mock.patch.object( - server.multitenant_manager, - "get_profile_for_token", - mock.CoroutineMock(), - ) as mock_get_profile: - mock_get_profile.side_effect = [ - test_module.MultitenantManagerError("corrupt 
token"), - test_module.StorageNotFoundError("out of memory"), - ] - for i in range(2): - with self.assertRaises(test_module.web.HTTPUnauthorized): - await setup_ctx_middle(mock_request, None) - async def test_register_external_plugin_x(self): context = InjectionContext() context.injector.bind_instance(ProtocolRegistry, ProtocolRegistry()) @@ -477,6 +384,47 @@ async def test_server_health_state(self): assert response.status == 503 await server.stop() + async def test_upgrade_middleware(self): + profile = InMemoryProfile.test_profile() + self.context = AdminRequestContext.test_context({}, profile) + self.request_dict = { + "context": self.context, + } + request = mock.MagicMock( + method="GET", + path_qs="/schemas/created", + match_info={}, + __getitem__=lambda _, k: self.request_dict[k], + ) + handler = mock.CoroutineMock() + + await test_module.upgrade_middleware(request, handler) + + async with profile.session() as session: + storage = session.inject(BaseStorage) + upgrading_record = StorageRecord( + RECORD_TYPE_ACAPY_UPGRADING, + UPGRADING_RECORD_IN_PROGRESS, + ) + # No upgrade in progress + await storage.add_record(upgrading_record) + + # Upgrade in progress without cache + with self.assertRaises(test_module.web.HTTPServiceUnavailable): + await test_module.upgrade_middleware(request, handler) + + # Upgrade in progress with cache + singletons.UpgradeInProgressSingleton().set_wallet("test-profile") + with self.assertRaises(test_module.web.HTTPServiceUnavailable): + await test_module.upgrade_middleware(request, handler) + + singletons.UpgradeInProgressSingleton().remove_wallet("test-profile") + await storage.delete_record(upgrading_record) + + # Upgrade in progress with cache + singletons.IsAnoncredsSingleton().set_wallet("test-profile") + await test_module.upgrade_middleware(request, handler) + @pytest.fixture async def server(): diff --git a/aries_cloudagent/admin/tests/test_auth.py b/aries_cloudagent/admin/tests/test_auth.py new file mode 100644 index 0000000000..2d6700a147 --- /dev/null +++ b/aries_cloudagent/admin/tests/test_auth.py @@ -0,0 +1,138 @@ +from unittest import IsolatedAsyncioTestCase + +from aiohttp import web + +from aries_cloudagent.tests import mock + +from ...core.in_memory.profile import InMemoryProfile +from ..decorators.auth import admin_authentication, tenant_authentication +from ..request_context import AdminRequestContext + + +class TestAdminAuthentication(IsolatedAsyncioTestCase): + def setUp(self) -> None: + + self.profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "admin_api_key", + "admin.admin_insecure_mode": False, + } + ) + self.context = AdminRequestContext.test_context({}, self.profile) + self.request_dict = { + "context": self.context, + } + self.request = mock.MagicMock( + __getitem__=lambda _, k: self.request_dict[k], headers={}, method="POST" + ) + self.decorated_handler = mock.CoroutineMock() + + async def test_options_request(self): + self.request = mock.MagicMock( + __getitem__=lambda _, k: self.request_dict[k], headers={}, method="OPTIONS" + ) + decor_func = admin_authentication(self.decorated_handler) + await decor_func(self.request) + self.decorated_handler.assert_called_once_with(self.request) + + async def test_insecure_mode(self): + self.profile.settings["admin.admin_insecure_mode"] = True + decor_func = admin_authentication(self.decorated_handler) + await decor_func(self.request) + self.decorated_handler.assert_called_once_with(self.request) + + async def test_invalid_api_key(self): + self.request = 
mock.MagicMock( + __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "wrong-key"}, + method="POST", + ) + decor_func = admin_authentication(self.decorated_handler) + with self.assertRaises(web.HTTPUnauthorized): + await decor_func(self.request) + + async def test_valid_api_key(self): + self.request = mock.MagicMock( + __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "admin_api_key"}, + method="POST", + ) + decor_func = admin_authentication(self.decorated_handler) + await decor_func(self.request) + self.decorated_handler.assert_called_once_with(self.request) + + +class TestTenantAuthentication(IsolatedAsyncioTestCase): + def setUp(self) -> None: + + self.profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "admin_api_key", + "admin.admin_insecure_mode": False, + "multitenant.enabled": True, + } + ) + self.context = AdminRequestContext.test_context({}, self.profile) + self.request_dict = { + "context": self.context, + } + self.request = mock.MagicMock( + __getitem__=lambda _, k: self.request_dict[k], headers={}, method="POST" + ) + self.decorated_handler = mock.CoroutineMock() + + async def test_options_request(self): + self.request = mock.MagicMock( + __getitem__=lambda _, k: self.request_dict[k], headers={}, method="OPTIONS" + ) + decor_func = tenant_authentication(self.decorated_handler) + await decor_func(self.request) + self.decorated_handler.assert_called_once_with(self.request) + + async def test_insecure_mode(self): + self.profile.settings["admin.admin_insecure_mode"] = True + decor_func = tenant_authentication(self.decorated_handler) + await decor_func(self.request) + self.decorated_handler.assert_called_once_with(self.request) + + async def test_single_tenant_invalid_api_key(self): + self.profile.settings["multitenant.enabled"] = False + self.request = mock.MagicMock( + __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "wrong-key"}, + method="POST", + ) + decor_func = tenant_authentication(self.decorated_handler) + with self.assertRaises(web.HTTPUnauthorized): + await decor_func(self.request) + + async def test_single_tenant_valid_api_key(self): + self.profile.settings["multitenant.enabled"] = False + self.request = mock.MagicMock( + __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "admin_api_key"}, + method="POST", + ) + decor_func = tenant_authentication(self.decorated_handler) + await decor_func(self.request) + self.decorated_handler.assert_called_once_with(self.request) + + async def test_multi_tenant_missing_auth_header(self): + self.request = mock.MagicMock( + __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "wrong-key"}, + method="POST", + ) + decor_func = tenant_authentication(self.decorated_handler) + with self.assertRaises(web.HTTPUnauthorized): + await decor_func(self.request) + + async def test_multi_tenant_valid_auth_header(self): + self.request = mock.MagicMock( + __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "admin_api_key", "Authorization": "Bearer my-jwt"}, + method="POST", + ) + decor_func = tenant_authentication(self.decorated_handler) + await decor_func(self.request) + self.decorated_handler.assert_called_once_with(self.request) diff --git a/aries_cloudagent/anoncreds/default/legacy_indy/registry.py b/aries_cloudagent/anoncreds/default/legacy_indy/registry.py index 5bed179f96..582a7ec0c6 100644 --- a/aries_cloudagent/anoncreds/default/legacy_indy/registry.py +++ 
b/aries_cloudagent/anoncreds/default/legacy_indy/registry.py @@ -3,11 +3,11 @@ import json import logging import re -import uuid from asyncio import shield from typing import List, Optional, Pattern, Sequence, Tuple from base58 import alphabet +from uuid_utils import uuid4 from ....anoncreds.default.legacy_indy.author import get_endorser_info from ....cache.base import BaseCache @@ -32,9 +32,7 @@ TransactionManagerError, ) from ....protocols.endorse_transaction.v1_0.util import is_author_role -from ....revocation_anoncreds.models.issuer_cred_rev_record import ( - IssuerCredRevRecord, -) +from ....revocation_anoncreds.models.issuer_cred_rev_record import IssuerCredRevRecord from ....revocation_anoncreds.recover import generate_ledger_rrrecovery_txn from ....storage.error import StorageError from ....utils import sentinel @@ -267,7 +265,7 @@ async def register_schema( # Need endorsement, so execute transaction flow (schema_id, schema_def) = result - job_id = uuid.uuid4().hex + job_id = uuid4().hex meta_data = {"context": {"job_id": job_id, "schema_id": schema_id}} transaction_manager = TransactionManager(profile) @@ -445,7 +443,7 @@ async def register_credential_definition( ) # Need endorsement, so execute transaction flow - job_id = uuid.uuid4().hex + job_id = uuid4().hex meta_data = { "context": { @@ -616,7 +614,7 @@ async def register_revocation_registry_definition( # Need endorsement, so execute transaction flow (rev_reg_def_id, reg_rev_def) = result - job_id = uuid.uuid4().hex + job_id = uuid4().hex meta_data = { "context": { "job_id": job_id, @@ -692,7 +690,7 @@ async def _get_or_fetch_rev_reg_def_max_cred_num( def _indexes_to_bit_array(self, indexes: List[int], size: int) -> List[int]: """Turn a sequence of indexes into a full state bit array.""" - return [1 if index in indexes else 0 for index in range(1, size + 1)] + return [1 if index in indexes else 0 for index in range(0, size + 1)] async def _get_ledger(self, profile: Profile, rev_reg_def_id: str): async with profile.session() as session: @@ -874,7 +872,7 @@ async def register_revocation_list( (rev_reg_def_id, requested_txn) = result - job_id = uuid.uuid4().hex + job_id = uuid4().hex meta_data = { "context": { "job_id": job_id, @@ -987,7 +985,7 @@ async def update_revocation_list( (rev_reg_def_id, requested_txn) = result - job_id = uuid.uuid4().hex + job_id = uuid4().hex meta_data = { "context": { "job_id": job_id, @@ -1123,7 +1121,7 @@ async def fix_ledger_entry( async def txn_submit( self, - profile: Profile, + ledger: BaseLedger, ledger_transaction: str, sign: bool = None, taa_accept: bool = None, @@ -1131,10 +1129,6 @@ async def txn_submit( write_ledger: bool = True, ) -> str: """Submit a transaction to the ledger.""" - ledger = profile.inject(BaseLedger) - - if not ledger: - raise LedgerError("No ledger available") try: async with ledger: diff --git a/aries_cloudagent/anoncreds/default/legacy_indy/tests/test_registry.py b/aries_cloudagent/anoncreds/default/legacy_indy/tests/test_registry.py index 58631cfadb..830a0bb722 100644 --- a/aries_cloudagent/anoncreds/default/legacy_indy/tests/test_registry.py +++ b/aries_cloudagent/anoncreds/default/legacy_indy/tests/test_registry.py @@ -9,7 +9,6 @@ from base58 import alphabet from .....anoncreds.base import ( - AnonCredsRegistrationError, AnonCredsSchemaAlreadyExists, ) from .....anoncreds.models.anoncreds_schema import ( @@ -21,7 +20,7 @@ from .....connections.models.conn_record import ConnRecord from .....core.in_memory.profile import InMemoryProfile from .....ledger.base import 
BaseLedger -from .....ledger.error import LedgerError, LedgerObjectAlreadyExistsError +from .....ledger.error import LedgerObjectAlreadyExistsError from .....messaging.responder import BaseResponder from .....protocols.endorse_transaction.v1_0.manager import ( TransactionManager, @@ -728,27 +727,16 @@ async def test_register_revocation_registry_definition_with_create_transaction_a assert mock_create_record.called async def test_txn_submit(self): - self.profile.inject = mock.MagicMock( - side_effect=[ - None, - mock.CoroutineMock( - txn_submit=mock.CoroutineMock(side_effect=LedgerError("test error")) - ), - mock.CoroutineMock( - txn_submit=mock.CoroutineMock(return_value="transaction response") - ), - ] + self.profile.context.injector.bind_instance( + BaseLedger, + mock.MagicMock( + txn_submit=mock.CoroutineMock(return_value="transaction_id") + ), ) - - # No ledger - with self.assertRaises(LedgerError): - await self.registry.txn_submit(self.profile, "test_txn") - # Write error - with self.assertRaises(AnonCredsRegistrationError): - await self.registry.txn_submit(self.profile, "test_txn") - - result = await self.registry.txn_submit(self.profile, "test_txn") - assert result == "transaction response" + async with self.profile.session() as session: + ledger = session.inject(BaseLedger) + result = await self.registry.txn_submit(ledger, "test_txn") + assert result == "transaction_id" async def test_register_revocation_list_no_endorsement(self): self.profile.context.injector.bind_instance( diff --git a/aries_cloudagent/anoncreds/holder.py b/aries_cloudagent/anoncreds/holder.py index 242feccc3f..ca4a4c3539 100644 --- a/aries_cloudagent/anoncreds/holder.py +++ b/aries_cloudagent/anoncreds/holder.py @@ -4,7 +4,6 @@ import json import logging import re -import uuid from typing import Dict, Optional, Sequence, Tuple, Union from anoncreds import ( @@ -14,10 +13,11 @@ CredentialRevocationState, Presentation, PresentCredentials, - create_link_secret, W3cCredential, + create_link_secret, ) from aries_askar import AskarError, AskarErrorCode +from uuid_utils import uuid4 from ..anoncreds.models.anoncreds_schema import AnonCredsSchema from ..askar.profile_anon import AskarAnoncredsProfile @@ -238,7 +238,7 @@ async def _finish_store_credential( f"Error parsing credential definition ID: {cred_def_id}" ) - credential_id = credential_id or str(uuid.uuid4()) + credential_id = credential_id or str(uuid4()) tags = { "schema_id": schema_id, "schema_issuer_did": schema_id_parts[1], @@ -315,9 +315,15 @@ async def store_credential_w3c( credential_definition, rev_reg_def, ) - # TODO we want to store the credential in the W3C format in the wallet, - # This will require changes to other endpoints that fetch credentials - cred_recvd = Credential.from_w3c(cred_w3c) + cred_legacy = Credential.from_w3c(cred_w3c) + cred_recvd = await asyncio.get_event_loop().run_in_executor( + None, + cred_legacy.process, + credential_request_metadata, + secret, + credential_definition, + rev_reg_def, + ) except AnoncredsError as err: raise AnonCredsHolderError("Error processing received credential") from err diff --git a/aries_cloudagent/anoncreds/revocation.py b/aries_cloudagent/anoncreds/revocation.py index 80e7d8c16e..ebe2923887 100644 --- a/aries_cloudagent/anoncreds/revocation.py +++ b/aries_cloudagent/anoncreds/revocation.py @@ -10,7 +10,6 @@ from pathlib import Path from typing import List, NamedTuple, Optional, Sequence, Tuple from urllib.parse import urlparse -from uuid import uuid4 import base58 from anoncreds import ( @@ -23,13 
+22,11 @@ ) from aries_askar.error import AskarError from requests import RequestException, Session +from uuid_utils import uuid4 from aries_cloudagent.anoncreds.models.anoncreds_cred_def import CredDef -from ..askar.profile_anon import ( - AskarAnoncredsProfile, - AskarAnoncredsProfileSession, -) +from ..askar.profile_anon import AskarAnoncredsProfile, AskarAnoncredsProfileSession from ..core.error import BaseError from ..core.event_bus import Event, EventBus from ..core.profile import Profile, ProfileSession diff --git a/aries_cloudagent/anoncreds/routes.py b/aries_cloudagent/anoncreds/routes.py index 316cc208ba..2eec01d12e 100644 --- a/aries_cloudagent/anoncreds/routes.py +++ b/aries_cloudagent/anoncreds/routes.py @@ -13,6 +13,7 @@ ) from marshmallow import fields +from ..admin.decorators.auth import tenant_authentication from ..admin.request_context import AdminRequestContext from ..core.event_bus import EventBus from ..ledger.error import LedgerError @@ -145,6 +146,7 @@ class SchemaPostRequestSchema(OpenAPISchema): @docs(tags=["anoncreds - schemas"], summary="Create a schema on the connected ledger") @request_schema(SchemaPostRequestSchema()) @response_schema(SchemaResultSchema(), 200, description="") +@tenant_authentication async def schemas_post(request: web.BaseRequest): """Request handler for creating a schema. @@ -216,6 +218,7 @@ async def schemas_post(request: web.BaseRequest): @docs(tags=["anoncreds - schemas"], summary="Retrieve an individual schemas details") @match_info_schema(SchemaIdMatchInfo()) @response_schema(GetSchemaResultSchema(), 200, description="") +@tenant_authentication async def schema_get(request: web.BaseRequest): """Request handler for getting a schema. @@ -245,6 +248,7 @@ async def schema_get(request: web.BaseRequest): @docs(tags=["anoncreds - schemas"], summary="Retrieve all schema ids") @querystring_schema(SchemasQueryStringSchema()) @response_schema(GetSchemasResponseSchema(), 200, description="") +@tenant_authentication async def schemas_get(request: web.BaseRequest): """Request handler for getting all schemas. @@ -388,6 +392,7 @@ class CredDefsQueryStringSchema(OpenAPISchema): ) @request_schema(CredDefPostRequestSchema()) @response_schema(CredDefResultSchema(), 200, description="") +@tenant_authentication async def cred_def_post(request: web.BaseRequest): """Request handler for creating . @@ -439,6 +444,7 @@ async def cred_def_post(request: web.BaseRequest): ) @match_info_schema(CredIdMatchInfo()) @response_schema(GetCredDefResultSchema(), 200, description="") +@tenant_authentication async def cred_def_get(request: web.BaseRequest): """Request handler for getting credential definition. @@ -486,6 +492,7 @@ class GetCredDefsResponseSchema(OpenAPISchema): ) @querystring_schema(CredDefsQueryStringSchema()) @response_schema(GetCredDefsResponseSchema(), 200, description="") +@tenant_authentication async def cred_defs_get(request: web.BaseRequest): """Request handler for getting all credential definitions. 
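The hunks above and below attach the new `tenant_authentication` decorator to each anoncreds route. A minimal sketch of the pattern, assuming only what the diff shows (apispec decorators outermost, authentication applied last before the handler); the handler name, tag, and response body here are illustrative:

```python
from aiohttp import web
from aiohttp_apispec import docs

from aries_cloudagent.admin.decorators.auth import tenant_authentication


# Hypothetical route mirroring the pattern applied throughout this diff:
# the decorator rejects requests lacking a valid x-api-key header (or a
# tenant bearer token when multitenancy is enabled) before the handler runs.
@docs(tags=["example"], summary="Illustrative protected endpoint")
@tenant_authentication
async def example_get(request: web.BaseRequest):
    return web.json_response({"ok": True})
```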
@@ -576,6 +583,7 @@ class RevRegCreateRequestSchemaAnoncreds(OpenAPISchema): ) @request_schema(RevRegCreateRequestSchemaAnoncreds()) @response_schema(RevRegDefResultSchema(), 200, description="") +@tenant_authentication async def rev_reg_def_post(request: web.BaseRequest): """Request handler for creating revocation registry definition.""" context: AdminRequestContext = request["context"] @@ -659,6 +667,7 @@ class RevListCreateRequestSchema(OpenAPISchema): ) @request_schema(RevListCreateRequestSchema()) @response_schema(RevListResultSchema(), 200, description="") +@tenant_authentication async def rev_list_post(request: web.BaseRequest): """Request handler for creating registering a revocation list.""" context: AdminRequestContext = request["context"] @@ -694,6 +703,7 @@ async def rev_list_post(request: web.BaseRequest): ) @match_info_schema(RevRegIdMatchInfoSchema()) @response_schema(RevocationModuleResponseSchema(), description="") +@tenant_authentication async def upload_tails_file(request: web.BaseRequest): """Request handler to upload local tails file for revocation registry. @@ -729,6 +739,7 @@ async def upload_tails_file(request: web.BaseRequest): ) @match_info_schema(RevRegIdMatchInfoSchema()) @response_schema(RevocationModuleResponseSchema(), description="") +@tenant_authentication async def set_active_registry(request: web.BaseRequest): """Request handler to set the active registry. diff --git a/aries_cloudagent/anoncreds/tests/test_holder.py b/aries_cloudagent/anoncreds/tests/test_holder.py index cf36931d9c..549a5777d8 100644 --- a/aries_cloudagent/anoncreds/tests/test_holder.py +++ b/aries_cloudagent/anoncreds/tests/test_holder.py @@ -25,12 +25,12 @@ MOCK_PRES, MOCK_PRES_REQ, ) +from aries_cloudagent.askar.profile import AskarProfile from aries_cloudagent.askar.profile_anon import AskarAnoncredsProfile from aries_cloudagent.core.in_memory.profile import ( InMemoryProfile, InMemoryProfileSession, ) -from aries_cloudagent.indy.sdk.profile import IndySdkProfile from aries_cloudagent.tests import mock from aries_cloudagent.wallet.error import WalletNotFoundError @@ -202,7 +202,7 @@ async def test_create_credential_request( async def test_create_credential_request_with_non_anoncreds_profile_throws_x(self): self.profile = InMemoryProfile.test_profile( settings={"wallet-type": "askar"}, - profile_class=IndySdkProfile, + profile_class=AskarProfile, ) self.holder = test_module.AnonCredsHolder(self.profile) with self.assertRaises(ValueError): diff --git a/aries_cloudagent/anoncreds/tests/test_issuer.py b/aries_cloudagent/anoncreds/tests/test_issuer.py index 10ee1ebb75..b1ba1517a3 100644 --- a/aries_cloudagent/anoncreds/tests/test_issuer.py +++ b/aries_cloudagent/anoncreds/tests/test_issuer.py @@ -25,6 +25,9 @@ SchemaResult, SchemaState, ) +from aries_cloudagent.askar.profile import ( + AskarProfile, +) from aries_cloudagent.askar.profile_anon import ( AskarAnoncredsProfile, ) @@ -33,7 +36,6 @@ InMemoryProfile, InMemoryProfileSession, ) -from aries_cloudagent.indy.sdk.profile import IndySdkProfile from aries_cloudagent.tests import mock from .. 
import issuer as test_module @@ -135,9 +137,7 @@ async def test_init(self): assert isinstance(self.issuer.profile, AskarAnoncredsProfile) async def test_init_wrong_profile_type(self): - self.issuer._profile = InMemoryProfile.test_profile( - profile_class=IndySdkProfile - ) + self.issuer._profile = InMemoryProfile.test_profile(profile_class=AskarProfile) with self.assertRaises(ValueError): self.issuer.profile diff --git a/aries_cloudagent/anoncreds/tests/test_revocation_setup.py b/aries_cloudagent/anoncreds/tests/test_revocation_setup.py index e21c09f305..24a6f471a6 100644 --- a/aries_cloudagent/anoncreds/tests/test_revocation_setup.py +++ b/aries_cloudagent/anoncreds/tests/test_revocation_setup.py @@ -37,7 +37,6 @@ async def asyncSetUp(self) -> None: async def test_on_cred_def_support_revocation_registers_revocation_def( self, mock_register_revocation_registry_definition ): - event = CredDefFinishedEvent( CredDefFinishedPayload( schema_id="schema_id", @@ -60,7 +59,6 @@ async def test_on_cred_def_support_revocation_registers_revocation_def( async def test_on_cred_def_author_with_auto_create_rev_reg_config_registers_reg_def( self, mock_register_revocation_registry_definition ): - self.profile.settings["endorser.author"] = True self.profile.settings["endorser.auto_create_rev_reg"] = True event = CredDefFinishedEvent( @@ -85,7 +83,6 @@ async def test_on_cred_def_author_with_auto_create_rev_reg_config_registers_reg_ async def test_on_cred_def_author_with_auto_create_rev_reg_config_and_support_revoc_option_registers_reg_def( self, mock_register_revocation_registry_definition ): - self.profile.settings["endorser.author"] = True self.profile.settings["endorser.auto_create_rev_reg"] = True event = CredDefFinishedEvent( @@ -110,7 +107,6 @@ async def test_on_cred_def_author_with_auto_create_rev_reg_config_and_support_re async def test_on_cred_def_not_author_or_support_rev_option( self, mock_register_revocation_registry_definition ): - event = CredDefFinishedEvent( CredDefFinishedPayload( schema_id="schema_id", diff --git a/aries_cloudagent/anoncreds/tests/test_routes.py b/aries_cloudagent/anoncreds/tests/test_routes.py index 5e90463b54..288f7f7eba 100644 --- a/aries_cloudagent/anoncreds/tests/test_routes.py +++ b/aries_cloudagent/anoncreds/tests/test_routes.py @@ -54,7 +54,10 @@ class TestAnoncredsRoutes(IsolatedAsyncioTestCase): async def asyncSetUp(self) -> None: self.session_inject = {} self.profile = InMemoryProfile.test_profile( - settings={"wallet.type": "askar-anoncreds"}, + settings={ + "wallet.type": "askar-anoncreds", + "admin.admin_api_key": "secret-key", + }, profile_class=AskarAnoncredsProfile, ) self.context = AdminRequestContext.test_context( @@ -69,6 +72,7 @@ async def asyncSetUp(self) -> None: query={}, __getitem__=lambda _, k: self.request_dict[k], context=self.context, + headers={"x-api-key": "secret-key"}, ) @mock.patch.object( @@ -342,7 +346,7 @@ async def test_set_active_registry(self, mock_set): async def test_schema_endpoints_wrong_profile_403(self): self.profile = InMemoryProfile.test_profile( - settings={"wallet-type": "askar"}, + settings={"wallet-type": "askar", "admin.admin_api_key": "secret-key"}, profile_class=AskarProfile, ) self.context = AdminRequestContext.test_context({}, self.profile) @@ -355,6 +359,7 @@ async def test_schema_endpoints_wrong_profile_403(self): query={}, __getitem__=lambda _, k: self.request_dict[k], context=self.context, + headers={"x-api-key": "secret-key"}, ) # POST schema @@ -382,7 +387,7 @@ async def 
test_schema_endpoints_wrong_profile_403(self): async def test_cred_def_endpoints_wrong_profile_403(self): self.profile = InMemoryProfile.test_profile( - settings={"wallet-type": "askar"}, + settings={"wallet-type": "askar", "admin.admin_api_key": "secret-key"}, profile_class=AskarProfile, ) self.context = AdminRequestContext.test_context({}, self.profile) @@ -395,6 +400,7 @@ async def test_cred_def_endpoints_wrong_profile_403(self): query={}, __getitem__=lambda _, k: self.request_dict[k], context=self.context, + headers={"x-api-key": "secret-key"}, ) # POST cred def @@ -425,7 +431,7 @@ async def test_cred_def_endpoints_wrong_profile_403(self): async def test_rev_reg_wrong_profile_403(self): self.profile = InMemoryProfile.test_profile( - settings={"wallet-type": "askar"}, + settings={"wallet-type": "askar", "admin.admin_api_key": "secret-key"}, profile_class=AskarProfile, ) self.context = AdminRequestContext.test_context({}, self.profile) @@ -438,6 +444,7 @@ async def test_rev_reg_wrong_profile_403(self): query={}, __getitem__=lambda _, k: self.request_dict[k], context=self.context, + headers={"x-api-key": "secret-key"}, ) self.request.json = mock.CoroutineMock( @@ -458,7 +465,7 @@ async def test_rev_reg_wrong_profile_403(self): async def test_rev_list_wrong_profile_403(self): self.profile = InMemoryProfile.test_profile( - settings={"wallet-type": "askar"}, + settings={"wallet-type": "askar", "admin.admin_api_key": "secret-key"}, profile_class=AskarProfile, ) self.context = AdminRequestContext.test_context({}, self.profile) @@ -471,6 +478,7 @@ async def test_rev_list_wrong_profile_403(self): query={}, __getitem__=lambda _, k: self.request_dict[k], context=self.context, + headers={"x-api-key": "secret-key"}, ) self.request.json = mock.CoroutineMock( @@ -481,7 +489,7 @@ async def test_rev_list_wrong_profile_403(self): async def test_uploads_tails_wrong_profile_403(self): self.profile = InMemoryProfile.test_profile( - settings={"wallet-type": "askar"}, + settings={"wallet-type": "askar", "admin.admin_api_key": "secret-key"}, profile_class=AskarProfile, ) self.context = AdminRequestContext.test_context({}, self.profile) @@ -494,6 +502,7 @@ async def test_uploads_tails_wrong_profile_403(self): query={}, __getitem__=lambda _, k: self.request_dict[k], context=self.context, + headers={"x-api-key": "secret-key"}, ) self.request.match_info = {"rev_reg_id": "rev_reg_id"} @@ -502,7 +511,7 @@ async def test_uploads_tails_wrong_profile_403(self): async def test_active_registry_wrong_profile_403(self): self.profile = InMemoryProfile.test_profile( - settings={"wallet-type": "askar"}, + settings={"wallet-type": "askar", "admin.admin_api_key": "secret-key"}, profile_class=AskarProfile, ) self.context = AdminRequestContext.test_context({}, self.profile) @@ -515,6 +524,7 @@ async def test_active_registry_wrong_profile_403(self): query={}, __getitem__=lambda _, k: self.request_dict[k], context=self.context, + headers={"x-api-key": "secret-key"}, ) self.request.match_info = {"rev_reg_id": "rev_reg_id"} diff --git a/aries_cloudagent/commands/tests/test_provision.py b/aries_cloudagent/commands/tests/test_provision.py index 15b7438791..9afd7cde98 100644 --- a/aries_cloudagent/commands/tests/test_provision.py +++ b/aries_cloudagent/commands/tests/test_provision.py @@ -1,5 +1,3 @@ -import pytest - from aries_cloudagent.tests import mock from unittest import IsolatedAsyncioTestCase @@ -19,26 +17,6 @@ def test_bad_calls(self): with self.assertRaises(SystemExit): test_module.execute(["bad"]) - @pytest.mark.indy - def 
test_provision_wallet(self): - test_seed = "testseed000000000000000000000001" - test_module.execute( - [ - "--wallet-type", - "indy", - "--wallet-name", - "test_wallet", - "--wallet-key", - "key", - "--seed", - test_seed, - "--no-ledger", - "--endpoint", - "test_endpoint", - "--recreate-wallet", - ] - ) - async def test_provision_ledger_configured(self): profile = mock.MagicMock(close=mock.CoroutineMock()) with mock.patch.object( diff --git a/aries_cloudagent/config/argparse.py b/aries_cloudagent/config/argparse.py index ed8b1fe948..96e5961f1c 100644 --- a/aries_cloudagent/config/argparse.py +++ b/aries_cloudagent/config/argparse.py @@ -585,6 +585,7 @@ def add_arguments(self, parser: ArgumentParser): metavar="", env_var="ACAPY_STORAGE_TYPE", help=( + "DEPRECATED: This option is ignored. " "Specifies the type of storage provider to use for the internal " "storage engine. This storage interface is used to store internal " "state. Supported internal storage types are 'basic' (memory) " @@ -640,14 +641,16 @@ def add_arguments(self, parser: ArgumentParser): "resolver instance." ), ) - parser.add_argument( - "--universal-resolver-bearer-token", - type=str, - nargs="?", - metavar="", - env_var="ACAPY_UNIVERSAL_RESOLVER_BEARER_TOKEN", - help="Bearer token if universal resolver instance requires authentication.", - ), + ( + parser.add_argument( + "--universal-resolver-bearer-token", + type=str, + nargs="?", + metavar="", + env_var="ACAPY_UNIVERSAL_RESOLVER_BEARER_TOKEN", + help="Bearer token if universal resolver instance requires authentication.", # noqa: E501 + ), + ) def get_settings(self, args: Namespace) -> dict: """Extract general settings.""" @@ -1573,10 +1576,10 @@ def add_arguments(self, parser: ArgumentParser): default="basic", env_var="ACAPY_WALLET_TYPE", help=( - "Specifies the type of Indy wallet provider to use. " + "Specifies the type of wallet provider to use. " "Supported internal storage types are 'basic' (memory), 'askar' " "and 'askar-anoncreds'." - "The default (if not specified) is 'basic'. 'indy' is deprecated." + " The default (if not specified) is 'basic'." ), ) parser.add_argument( @@ -1600,10 +1603,7 @@ def add_arguments(self, parser: ArgumentParser): help=( "Specifies the storage configuration to use for the wallet. " "This is required if you are for using 'postgres_storage' wallet " - 'storage type. For example, \'{"url":"localhost:5432", ' - '"wallet_scheme":"MultiWalletSingleTable"}\'. This ' - "configuration maps to the indy sdk postgres plugin " - "(PostgresConfig)." + 'storage type. For example, \'{"url":"localhost:5432"}\'.' ), ) parser.add_argument( @@ -1627,9 +1627,8 @@ def add_arguments(self, parser: ArgumentParser): "This is required if you are for using 'postgres_storage' wallet " 'For example, \'{"account":"postgres","password": ' '"mysecretpassword","admin_account":"postgres", ' - '"admin_password":"mysecretpassword"}\'. This configuration maps ' - "to the indy sdk postgres plugin (PostgresCredentials). NOTE: " - "admin_user must have the CREATEDB role or else initialization " + '"admin_password":"mysecretpassword"}\'.' + " NOTE: admin_user must have the CREATEDB role or else initialization " "will fail."
), ) @@ -1683,7 +1682,7 @@ def get_settings(self, args: Namespace) -> dict: if args.recreate_wallet: settings["wallet.recreate"] = True # check required settings for persistent wallets - if settings["wallet.type"] in ["indy", "askar", "askar-anoncreds"]: + if settings["wallet.type"] in ["askar", "askar-anoncreds"]: # requires name, key if not args.wallet_name or not args.wallet_key: raise ArgsParseError( @@ -1698,7 +1697,7 @@ def get_settings(self, args: Namespace) -> dict: if not args.wallet_storage_config or not args.wallet_storage_creds: raise ArgsParseError( "Parameters --wallet-storage-config and --wallet-storage-creds " - "must be provided for indy postgres wallets" + "must be provided for postgres wallets" ) return settings diff --git a/aries_cloudagent/config/default_context.py b/aries_cloudagent/config/default_context.py index a14de29720..3bd323a171 100644 --- a/aries_cloudagent/config/default_context.py +++ b/aries_cloudagent/config/default_context.py @@ -15,7 +15,6 @@ from ..resolver.did_resolver import DIDResolver from ..tails.base import BaseTailsServer from ..transport.wire_format import BaseWireFormat -from ..utils.dependencies import is_indy_sdk_module_installed from ..utils.stats import Collector from ..wallet.default_verification_key_strategy import ( BaseVerificationKeyStrategy, @@ -70,18 +69,6 @@ async def build_context(self) -> InjectionContext: async def bind_providers(self, context: InjectionContext): """Bind various class providers.""" - # Bind global indy pool provider to be able to share pools between wallets - # It is important the ledger pool provider is available in the base context - # so it can be shared by all wallet instances. If we set it in the indy sdk - # profile provider it could mean other wallets won't have access to the provider - if is_indy_sdk_module_installed(): - from ..ledger.indy import IndySdkLedgerPool, IndySdkLedgerPoolProvider - - context.injector.bind_provider( - IndySdkLedgerPool, - CachedProvider(IndySdkLedgerPoolProvider(), ("ledger.pool_name",)), - ) - context.injector.bind_provider(ProfileManager, ProfileManagerProvider()) wallet_type = self.settings.get("wallet.type") diff --git a/aries_cloudagent/config/injection_context.py b/aries_cloudagent/config/injection_context.py index fdadc88224..bdd91de149 100644 --- a/aries_cloudagent/config/injection_context.py +++ b/aries_cloudagent/config/injection_context.py @@ -21,12 +21,14 @@ class InjectionContext(BaseInjector): ROOT_SCOPE = "application" def __init__( - self, *, settings: Mapping[str, object] = None, enforce_typing: bool = True + self, + *, + settings: Optional[Mapping[str, object]] = None, + enforce_typing: bool = True ): """Initialize a `ServiceConfig`.""" self._injector = Injector(settings, enforce_typing=enforce_typing) self._scope_name = InjectionContext.ROOT_SCOPE - self._scopes = [] @property def injector(self) -> Injector: @@ -38,16 +40,6 @@ def injector(self, injector: Injector): """Setter for scope-specific injector.""" self._injector = injector - @property - def scope_name(self) -> str: - """Accessor for the current scope name.""" - return self._scope_name - - @scope_name.setter - def scope_name(self, scope_name: str): - """Accessor for the current scope name.""" - self._scope_name = scope_name - @property def settings(self) -> Settings: """Accessor for scope-specific settings.""" @@ -64,7 +56,7 @@ def update_settings(self, settings: Mapping[str, object]): self.injector.settings.update(settings) def start_scope( - self, scope_name: str, settings: Optional[Mapping[str, 
object]] = None + self, settings: Optional[Mapping[str, object]] = None ) -> "InjectionContext": """Begin a new named scope. @@ -76,39 +68,15 @@ def start_scope( A new injection context representing the scope """ - if not scope_name: - raise InjectionContextError("Scope name must be non-empty") - if self._scope_name == scope_name: - raise InjectionContextError("Cannot re-enter scope: {}".format(scope_name)) - for scope in self._scopes: - if scope.name == scope_name: - raise InjectionContextError( - "Cannot re-enter scope: {}".format(scope_name) - ) result = self.copy() - result._scopes.append(Scope(name=self.scope_name, injector=self.injector)) - result._scope_name = scope_name if settings: result.update_settings(settings) return result - def injector_for_scope(self, scope_name: str) -> Injector: - """Fetch the injector for a specific scope. - - Args: - scope_name: The unique scope identifier - """ - if scope_name == self.scope_name: - return self.injector - for scope in self._scopes: - if scope.name == scope_name: - return scope.injector - return None - def inject( self, base_cls: Type[InjectType], - settings: Mapping[str, object] = None, + settings: Optional[Mapping[str, object]] = None, ) -> InjectType: """Get the provided instance of a given class identifier. @@ -125,7 +93,7 @@ def inject( def inject_or( self, base_cls: Type[InjectType], - settings: Mapping[str, object] = None, + settings: Optional[Mapping[str, object]] = None, default: Optional[InjectType] = None, ) -> Optional[InjectType]: """Get the provided instance of a given class identifier or default if not found. @@ -145,5 +113,4 @@ def copy(self) -> "InjectionContext": """Produce a copy of the injector instance.""" result = copy.copy(self) result._injector = self.injector.copy() - result._scopes = self._scopes.copy() return result diff --git a/aries_cloudagent/config/injector.py b/aries_cloudagent/config/injector.py index 4d99f8d09c..9c47bd1ec5 100644 --- a/aries_cloudagent/config/injector.py +++ b/aries_cloudagent/config/injector.py @@ -11,7 +11,10 @@ class Injector(BaseInjector): """Injector implementation with static and dynamic bindings.""" def __init__( - self, settings: Mapping[str, object] = None, *, enforce_typing: bool = True + self, + settings: Optional[Mapping[str, object]] = None, + *, + enforce_typing: bool = True, ): """Initialize an `Injector`.""" self.enforce_typing = enforce_typing @@ -54,7 +57,7 @@ def get_provider(self, base_cls: Type[InjectType]): def inject_or( self, base_cls: Type[InjectType], - settings: Mapping[str, object] = None, + settings: Optional[Mapping[str, object]] = None, default: Optional[InjectType] = None, ) -> Optional[InjectType]: """Get the provided instance of a given class identifier or default if not found. @@ -92,7 +95,7 @@ def inject_or( def inject( self, base_cls: Type[InjectType], - settings: Mapping[str, object] = None, + settings: Optional[Mapping[str, object]] = None, ) -> InjectType: """Get the provided instance of a given class identifier. 
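With named scopes gone, `start_scope` now simply copies the context (and its injector) and optionally layers settings overrides on top, leaving the parent untouched; this is exactly what the updated `test_settings_scope` below exercises. A minimal sketch of the new call shape; the label values are illustrative:

```python
from aries_cloudagent.config.injection_context import InjectionContext

# Scopes are now anonymous: start_scope() returns a copy whose settings
# can diverge from the parent context without affecting it.
root = InjectionContext(settings={"default_label": "Base Agent"})
scoped = root.start_scope({"default_label": "Scoped Agent"})

assert scoped.settings["default_label"] == "Scoped Agent"
assert root.settings["default_label"] == "Base Agent"  # parent unchanged
```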
diff --git a/aries_cloudagent/config/ledger.py b/aries_cloudagent/config/ledger.py index 9e77599176..2662471666 100644 --- a/aries_cloudagent/config/ledger.py +++ b/aries_cloudagent/config/ledger.py @@ -1,25 +1,24 @@ """Ledger configuration.""" -from collections import OrderedDict import logging import re import sys +from collections import OrderedDict from typing import Optional -import uuid import markdown import prompt_toolkit from prompt_toolkit.eventloop.defaults import use_asyncio_event_loop from prompt_toolkit.formatted_text import HTML +from uuid_utils import uuid4 from ..config.settings import Settings from ..core.profile import Profile from ..ledger.base import BaseLedger from ..ledger.endpoint_type import EndpointType from ..ledger.error import LedgerError -from ..utils.http import fetch, FetchError +from ..utils.http import FetchError, fetch from ..wallet.base import BaseWallet - from .base import ConfigError LOGGER = logging.getLogger(__name__) @@ -88,7 +87,7 @@ async def load_multiple_genesis_transactions_from_config(settings: Settings): is_write_ledger = ( False if config.get("is_write") is None else config.get("is_write") ) - ledger_id = config.get("id") or str(uuid.uuid4()) + ledger_id = config.get("id") or str(uuid4()) if is_write_ledger: write_ledger_set = True config_item = { diff --git a/aries_cloudagent/config/tests/test_injection_context.py b/aries_cloudagent/config/tests/test_injection_context.py index e6bd1fd4f0..f68c7f6948 100644 --- a/aries_cloudagent/config/tests/test_injection_context.py +++ b/aries_cloudagent/config/tests/test_injection_context.py @@ -1,7 +1,7 @@ from unittest import IsolatedAsyncioTestCase from ..base import InjectionError -from ..injection_context import InjectionContext, InjectionContextError +from ..injection_context import InjectionContext class TestInjectionContext(IsolatedAsyncioTestCase): @@ -14,39 +14,16 @@ def setUp(self): def test_settings_init(self): """Test settings initialization.""" - assert self.test_instance.scope_name == self.test_instance.ROOT_SCOPE for key in self.test_settings: assert key in self.test_instance.settings assert self.test_instance.settings[key] == self.test_settings[key] - def test_simple_scope(self): - """Test scope entrance and exit.""" - with self.assertRaises(InjectionContextError): - self.test_instance.start_scope(None) - with self.assertRaises(InjectionContextError): - self.test_instance.start_scope(self.test_instance.ROOT_SCOPE) - - injector = self.test_instance.injector_for_scope(self.test_instance.ROOT_SCOPE) - assert injector == self.test_instance.injector - assert self.test_instance.injector_for_scope("no such scope") is None - - context = self.test_instance.start_scope(self.test_scope) - assert context.scope_name == self.test_scope - context.scope_name = "Bob" - assert context.scope_name == "Bob" - - with self.assertRaises(InjectionContextError): - context.start_scope(self.test_instance.ROOT_SCOPE) - assert self.test_instance.scope_name == self.test_instance.ROOT_SCOPE - def test_settings_scope(self): """Test scoped settings.""" upd_settings = {self.test_key: "NEWVAL"} - context = self.test_instance.start_scope(self.test_scope, upd_settings) + context = self.test_instance.start_scope(upd_settings) assert context.settings[self.test_key] == "NEWVAL" assert self.test_instance.settings[self.test_key] == self.test_value - root = context.injector_for_scope(context.ROOT_SCOPE) - assert root.settings[self.test_key] == self.test_value context.settings = upd_settings assert context.settings == upd_settings 
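The `ledger.py` hunk above repeats a swap made throughout this diff: `uuid.uuid4` from the standard library is replaced by `uuid4` from the `uuid_utils` package. Call sites keep the same shape, assuming `uuid_utils` mirrors the standard library's UUID surface (which these hunks rely on):

```python
from uuid_utils import uuid4

# Same idioms as the stdlib; only the import changes.
ledger_id = str(uuid4())  # as in config.get("id") or str(uuid4()) above
job_id = uuid4().hex      # as in the legacy_indy registry hunks
```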
@@ -64,11 +41,8 @@ async def test_inject_simple(self): async def test_inject_scope(self): """Test a scoped injection.""" - context = self.test_instance.start_scope(self.test_scope) + context = self.test_instance.start_scope() assert context.inject_or(str) is None context.injector.bind_instance(str, self.test_value) assert context.inject(str) is self.test_value assert self.test_instance.inject_or(str) is None - root = context.injector_for_scope(context.ROOT_SCOPE) - assert root.inject_or(str) is None - assert self.test_instance.inject_or(str) is None diff --git a/aries_cloudagent/core/conductor.py b/aries_cloudagent/core/conductor.py index d1f44e68ab..9c60a32606 100644 --- a/aries_cloudagent/core/conductor.py +++ b/aries_cloudagent/core/conductor.py @@ -7,6 +7,7 @@ """ +import asyncio import hashlib import json import logging @@ -17,12 +18,8 @@ from ..admin.base_server import BaseAdminServer from ..admin.server import AdminResponder, AdminServer -from ..commands.upgrade import ( - add_version_record, - get_upgrade_version_list, - upgrade, -) -from ..config.default_context import ContextBuilder +from ..commands.upgrade import add_version_record, get_upgrade_version_list, upgrade +from ..config.default_context import ContextBuilder, DefaultContextBuilder from ..config.injection_context import InjectionContext from ..config.ledger import ( get_genesis_transactions, @@ -63,7 +60,11 @@ from ..storage.base import BaseStorage from ..storage.error import StorageNotFoundError from ..storage.record import StorageRecord -from ..storage.type import RECORD_TYPE_ACAPY_STORAGE_TYPE +from ..storage.type import ( + RECORD_TYPE_ACAPY_STORAGE_TYPE, + STORAGE_TYPE_VALUE_ANONCREDS, + STORAGE_TYPE_VALUE_ASKAR, +) from ..transport.inbound.manager import InboundTransportManager from ..transport.inbound.message import InboundMessage from ..transport.outbound.base import OutboundDeliveryError @@ -71,10 +72,12 @@ from ..transport.outbound.message import OutboundMessage from ..transport.outbound.status import OutboundSendStatus from ..transport.wire_format import BaseWireFormat +from ..utils.profiles import get_subwallet_profiles_from_storage from ..utils.stats import Collector from ..utils.task_queue import CompletedTask, TaskQueue from ..vc.ld_proofs.document_loader import DocumentLoader from ..version import RECORD_TYPE_ACAPY_VERSION, __version__ +from ..wallet.anoncreds_upgrade import upgrade_wallet_to_anoncreds_if_requested from ..wallet.did_info import DIDInfo from .dispatcher import Dispatcher from .error import StartupError @@ -111,6 +114,8 @@ def __init__(self, context_builder: ContextBuilder) -> None: self.root_profile: Profile = None self.setup_public_did: DIDInfo = None + force_agent_anoncreds = False + @property def context(self) -> InjectionContext: """Accessor for the injection context.""" @@ -121,6 +126,9 @@ async def setup(self): context = await self.context_builder.build_context() + if self.force_agent_anoncreds: + context.settings.set_value("wallet.type", "askar-anoncreds") + # Fetch genesis transactions if necessary if context.settings.get("ledger.ledger_config_list"): await load_multiple_genesis_transactions_from_config(context.settings) @@ -168,17 +176,6 @@ async def setup(self): self.root_profile, ), ) - elif ( - self.root_profile.BACKEND_NAME == "indy" - and ledger.BACKEND_NAME == "indy" - ): - context.injector.bind_provider( - IndyVerifier, - ClassProvider( - "aries_cloudagent.indy.sdk.verifier.IndySdkVerifier", - self.root_profile, - ), - ) else: raise MultipleLedgerManagerError( "Multiledger 
is supported only for Indy SDK or Askar " @@ -522,7 +519,9 @@ async def start(self) -> None: except Exception: LOGGER.exception("Error accepting mediation invitation") - # notify protocols of startup status + await self.check_for_wallet_upgrades_in_progress() + + # notify protocols of startup status await self.root_profile.notify(STARTUP_EVENT_TOPIC, {}) async def stop(self, timeout=1.0): @@ -796,8 +795,9 @@ async def check_for_valid_wallet_type(self, profile): ) except StorageNotFoundError: acapy_version = None + # Any existing agent will have an acapy_version record if acapy_version: - storage_type_from_storage = "askar" + storage_type_from_storage = STORAGE_TYPE_VALUE_ASKAR LOGGER.info( f"Existing agent found. Setting wallet type to {storage_type_from_storage}." # noqa: E501 ) @@ -820,6 +820,38 @@ ) if storage_type_from_storage != storage_type_from_config: - raise StartupError( - f"Wallet type config [{storage_type_from_config}] doesn't match with the wallet type in storage [{storage_type_record.value}]" # noqa: E501 - ) + if ( + storage_type_from_config == STORAGE_TYPE_VALUE_ASKAR + and storage_type_from_storage == STORAGE_TYPE_VALUE_ANONCREDS + ): + LOGGER.warning( + "The agent has been upgraded to use an anoncreds wallet. Please update the wallet.type in the config file to 'askar-anoncreds'" # noqa: E501 + ) + # Allow agent to create anoncreds profile with askar + # wallet type config by stopping conductor and reloading context + await self.stop() + self.force_agent_anoncreds = True + self.context.settings.set_value("wallet.type", "askar-anoncreds") + self.context_builder = DefaultContextBuilder(self.context.settings) + await self.setup() + else: + raise StartupError( + f"Wallet type config [{storage_type_from_config}] doesn't match with the wallet type in storage [{storage_type_record.value}]" # noqa: E501 + ) + + async def check_for_wallet_upgrades_in_progress(self): + """Check for upgrade and upgrade if needed.""" + multitenant_mgr = self.context.inject_or(BaseMultitenantManager) + if multitenant_mgr: + subwallet_profiles = await get_subwallet_profiles_from_storage( + self.root_profile + ) + await asyncio.gather( + *[ + upgrade_wallet_to_anoncreds_if_requested(profile, is_subwallet=True) + for profile in subwallet_profiles + ] + ) + + else: + await upgrade_wallet_to_anoncreds_if_requested(self.root_profile) diff --git a/aries_cloudagent/core/profile.py b/aries_cloudagent/core/profile.py index 7b2b2f50da..7030769003 100644 --- a/aries_cloudagent/core/profile.py +++ b/aries_cloudagent/core/profile.py @@ -1,8 +1,9 @@ """Classes for managing profile information within a request context.""" -import logging from abc import ABC, abstractmethod +import logging from typing import Any, Mapping, Optional, Type +from weakref import ref from ..config.base import InjectionError from ..config.injection_context import InjectionContext @@ -30,10 +31,13 @@ def __init__( created: bool = False, ): """Initialize a base profile.""" - self._context = context or InjectionContext() self._created = created self._name = name or Profile.DEFAULT_NAME + context = context or InjectionContext() + self._context = context.start_scope() + self._context.injector.bind_instance(Profile, ref(self)) + @property def backend(self) -> str: """Accessor for the backend implementation name.""" @@ -159,10 +163,12 @@ def __init__( self._active = False self._awaited = False self._entered = 0 - self._context = (context or profile.context).start_scope("session", settings) +
self._context = (context or profile.context).start_scope(settings) self._profile = profile self._events = [] + self._context.injector.bind_instance(ProfileSession, ref(self)) + async def _setup(self): """Create the underlying session or transaction.""" @@ -331,7 +337,6 @@ class ProfileManagerProvider(BaseProvider): "askar": "aries_cloudagent.askar.profile.AskarProfileManager", "askar-anoncreds": "aries_cloudagent.askar.profile_anon.AskarAnonProfileManager", "in_memory": "aries_cloudagent.core.in_memory.InMemoryProfileManager", - "indy": "aries_cloudagent.indy.sdk.profile.IndySdkProfileManager", } def __init__(self): diff --git a/aries_cloudagent/core/tests/test_conductor.py b/aries_cloudagent/core/tests/test_conductor.py index 55a8ab0c4b..685e420005 100644 --- a/aries_cloudagent/core/tests/test_conductor.py +++ b/aries_cloudagent/core/tests/test_conductor.py @@ -117,6 +117,8 @@ async def test_startup_version_record_exists(self): ) as mock_outbound_mgr, mock.patch.object( test_module, "LoggingConfigurator", autospec=True ) as mock_logger, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -166,6 +168,7 @@ async def test_startup_version_record_exists(self): mock_inbound_mgr.return_value.stop.assert_awaited_once_with() mock_outbound_mgr.return_value.stop.assert_awaited_once_with() + assert mock_upgrade.called async def test_startup_version_no_upgrade_add_record(self): builder: ContextBuilder = StubContextBuilder(self.test_settings) @@ -176,6 +179,8 @@ async def test_startup_version_no_upgrade_add_record(self): ) as mock_inbound_mgr, mock.patch.object( test_module, "OutboundTransportManager", autospec=True ) as mock_outbound_mgr, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -213,6 +218,8 @@ async def test_startup_version_no_upgrade_add_record(self): ) as mock_inbound_mgr, mock.patch.object( test_module, "OutboundTransportManager", autospec=True ) as mock_outbound_mgr, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -257,6 +264,8 @@ async def test_startup_version_force_upgrade(self): ) as mock_outbound_mgr, mock.patch.object( test_module, "LoggingConfigurator", autospec=True ) as mock_logger, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -296,6 +305,8 @@ async def test_startup_version_force_upgrade(self): ) as mock_outbound_mgr, mock.patch.object( test_module, "LoggingConfigurator", autospec=True ) as mock_logger, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -335,6 +346,8 @@ async def test_startup_version_force_upgrade(self): ) as mock_outbound_mgr, mock.patch.object( test_module, "LoggingConfigurator", autospec=True ) as mock_logger, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -373,6 +386,8 @@ async def test_startup_version_record_not_exists(self): ) as 
mock_outbound_mgr, mock.patch.object( test_module, "LoggingConfigurator", autospec=True ) as mock_logger, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -449,6 +464,8 @@ async def test_startup_no_public_did(self): ) as mock_outbound_mgr, mock.patch.object( test_module, "LoggingConfigurator", autospec=True ) as mock_logger, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -492,6 +509,8 @@ async def test_stats(self): ) as mock_inbound_mgr, mock.patch.object( test_module, "OutboundTransportManager", autospec=True ) as mock_outbound_mgr, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( test_module, "LoggingConfigurator", autospec=True ) as mock_logger: mock_inbound_mgr.return_value.sessions = ["dummy"] @@ -884,6 +903,8 @@ async def test_admin(self): ) as admin_start, mock.patch.object( admin, "stop", autospec=True ) as admin_stop, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -936,6 +957,8 @@ async def test_admin_startx(self): ) as oob_mgr, mock.patch.object( test_module, "ConnectionManager" ) as conn_mgr, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -992,7 +1015,9 @@ async def test_start_static(self): ), ), mock.patch.object( test_module, "OutboundTransportManager", autospec=True - ) as mock_outbound_mgr: + ) as mock_outbound_mgr, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade: mock_outbound_mgr.return_value.registered_transports = { "test": mock.MagicMock(schemes=["http"]) } @@ -1166,7 +1191,9 @@ async def test_print_invite_connection(self): ), ), mock.patch.object( test_module, "OutboundTransportManager", autospec=True - ) as mock_outbound_mgr: + ) as mock_outbound_mgr, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade: mock_outbound_mgr.return_value.registered_transports = { "test": mock.MagicMock(schemes=["http"]) } @@ -1203,6 +1230,8 @@ async def test_clear_default_mediator(self): "MediationManager", return_value=mock.MagicMock(clear_default_mediator=mock.CoroutineMock()), ) as mock_mgr, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -1254,7 +1283,9 @@ async def test_set_default_mediator(self): mock.MagicMock(value=f"v{__version__}"), ] ), - ): + ), mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade: await conductor.start() await conductor.stop() mock_mgr.return_value.set_default_mediator_by_id.assert_called_once() @@ -1277,6 +1308,8 @@ async def test_set_default_mediator_x(self): "retrieve_by_id", mock.CoroutineMock(side_effect=Exception()), ), mock.patch.object(test_module, "LOGGER") as mock_logger, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as 
mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -1425,6 +1458,8 @@ async def test_mediator_invitation_0160(self, mock_from_url, _): ) as mock_mgr, mock.patch.object( mock_conn_record, "metadata_set", mock.CoroutineMock() ), mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -1484,6 +1519,8 @@ async def test_mediator_invitation_0434(self, mock_from_url, _): ) ), ) as mock_mgr, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -1542,6 +1579,8 @@ async def test_mediation_invitation_should_use_stored_invitation( ), mock.patch.object( test_module, "MediationManager", return_value=mock_mediation_manager ), mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -1596,7 +1635,9 @@ async def test_mediation_invitation_should_not_create_connection_for_old_invitat mock.MagicMock(value=f"v{__version__}"), ] ), - ): + ), mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade: # when await conductor.start() await conductor.stop() @@ -1631,6 +1672,8 @@ async def test_mediator_invitation_x(self, _): ) as mock_from_url, mock.patch.object( test_module, "LOGGER" ) as mock_logger, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -1694,6 +1737,8 @@ async def test_startup_x_no_storage_version(self): ) as mock_outbound_mgr, mock.patch.object( test_module, "LOGGER" ) as mock_logger, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -1735,6 +1780,8 @@ async def test_startup_storage_type_exists_and_matches(self): ) as mock_outbound_mgr, mock.patch.object( test_module, "LoggingConfigurator", autospec=True ) as mock_logger, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -1774,7 +1821,7 @@ async def test_startup_storage_type_exists_and_matches(self): await conductor.stop() - async def test_startup_storage_type_exists_and_does_not_match(self): + async def test_startup_storage_type_anoncreds_and_config_askar_re_calls_setup(self): builder: ContextBuilder = StubContextBuilder(self.test_settings) conductor = test_module.Conductor(builder) @@ -1785,6 +1832,8 @@ async def test_startup_storage_type_exists_and_does_not_match(self): ) as mock_outbound_mgr, mock.patch.object( test_module, "LoggingConfigurator", autospec=True ) as mock_logger, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -1819,9 +1868,9 @@ async def test_startup_storage_type_exists_and_does_not_match(self): mock_inbound_mgr.return_value.registered_transports = {} mock_outbound_mgr.return_value.registered_transports = {} - - with self.assertRaises(test_module.StartupError): + with 
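Each conductor test in this file now stacks one more `mock.patch.object(...)` for `upgrade_wallet_to_anoncreds_if_requested` on top of the existing patches. Where that repetition grows further, the shared patches can be entered once through `contextlib.ExitStack`; a sketch under stand-in names (this is not the PR's test code, and `test_module` here is a dummy namespace standing in for the conductor module):

import types
from contextlib import ExitStack
from unittest import mock

# Stand-in for the conductor module under test (hypothetical).
test_module = types.SimpleNamespace(
    upgrade_wallet_to_anoncreds_if_requested=lambda *args, **kwargs: True,
)


def patch_upgrade(stack: ExitStack) -> mock.MagicMock:
    """Enter the shared patch once and hand back the mock for assertions."""
    return stack.enter_context(
        mock.patch.object(
            test_module,
            "upgrade_wallet_to_anoncreds_if_requested",
            return_value=False,
        )
    )


with ExitStack() as stack:
    mock_upgrade = patch_upgrade(stack)
    assert test_module.upgrade_wallet_to_anoncreds_if_requested() is False
    assert mock_upgrade.called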
mock.patch.object(test_module.Conductor, "setup") as mock_setup: await conductor.start() + assert mock_setup.called await conductor.stop() @@ -1838,6 +1887,8 @@ async def test_startup_storage_type_does_not_exist_and_existing_agent_then_set_t ) as mock_outbound_mgr, mock.patch.object( test_module, "LoggingConfigurator", autospec=True ) as mock_logger, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( @@ -1902,6 +1953,8 @@ async def test_startup_storage_type_does_not_exist_and_new_anoncreds_agent( ) as mock_outbound_mgr, mock.patch.object( test_module, "LoggingConfigurator", autospec=True ) as mock_logger, mock.patch.object( + test_module, "upgrade_wallet_to_anoncreds_if_requested", return_value=False + ) as mock_upgrade, mock.patch.object( BaseStorage, "find_record", mock.CoroutineMock( diff --git a/aries_cloudagent/holder/routes.py b/aries_cloudagent/holder/routes.py index 7b7ac9a198..97d354497d 100644 --- a/aries_cloudagent/holder/routes.py +++ b/aries_cloudagent/holder/routes.py @@ -10,9 +10,9 @@ request_schema, response_schema, ) - from marshmallow import fields +from ..admin.decorators.auth import tenant_authentication from ..admin.request_context import AdminRequestContext from ..indy.holder import IndyHolder, IndyHolderError from ..indy.models.cred_precis import IndyCredInfoSchema @@ -193,6 +193,7 @@ class CredRevokedResultSchema(OpenAPISchema): @docs(tags=["credentials"], summary="Fetch credential from wallet by id") @match_info_schema(HolderCredIdMatchInfoSchema()) @response_schema(IndyCredInfoSchema(), 200, description="") +@tenant_authentication async def credentials_get(request: web.BaseRequest): """Request handler for retrieving credential. @@ -220,6 +221,7 @@ async def credentials_get(request: web.BaseRequest): @match_info_schema(HolderCredIdMatchInfoSchema()) @querystring_schema(CredRevokedQueryStringSchema()) @response_schema(CredRevokedResultSchema(), 200, description="") +@tenant_authentication async def credentials_revoked(request: web.BaseRequest): """Request handler for querying revocation status of credential. @@ -263,6 +265,7 @@ async def credentials_revoked(request: web.BaseRequest): @docs(tags=["credentials"], summary="Get attribute MIME types from wallet") @match_info_schema(HolderCredIdMatchInfoSchema()) @response_schema(AttributeMimeTypesResultSchema(), 200, description="") +@tenant_authentication async def credentials_attr_mime_types_get(request: web.BaseRequest): """Request handler for getting credential attribute MIME types. @@ -285,6 +288,7 @@ async def credentials_attr_mime_types_get(request: web.BaseRequest): @docs(tags=["credentials"], summary="Remove credential from wallet by id") @match_info_schema(HolderCredIdMatchInfoSchema()) @response_schema(HolderModuleResponseSchema(), description="") +@tenant_authentication async def credentials_remove(request: web.BaseRequest): """Request handler for searching connection records. @@ -316,6 +320,7 @@ async def credentials_remove(request: web.BaseRequest): ) @querystring_schema(CredentialsListQueryStringSchema()) @response_schema(CredInfoListSchema(), 200, description="") +@tenant_authentication async def credentials_list(request: web.BaseRequest): """Request handler for searching credential records. 
@@ -354,6 +359,7 @@ async def credentials_list(request: web.BaseRequest): ) @match_info_schema(HolderCredIdMatchInfoSchema()) @response_schema(VCRecordSchema(), 200, description="") +@tenant_authentication async def w3c_cred_get(request: web.BaseRequest): """Request handler for retrieving W3C credential. @@ -385,6 +391,7 @@ async def w3c_cred_get(request: web.BaseRequest): ) @match_info_schema(HolderCredIdMatchInfoSchema()) @response_schema(HolderModuleResponseSchema(), 200, description="") +@tenant_authentication async def w3c_cred_remove(request: web.BaseRequest): """Request handler for deleting W3C credential. @@ -422,6 +429,7 @@ async def w3c_cred_remove(request: web.BaseRequest): @request_schema(W3CCredentialsListRequestSchema()) @querystring_schema(CredentialsListQueryStringSchema()) @response_schema(VCRecordListSchema(), 200, description="") +@tenant_authentication async def w3c_creds_list(request: web.BaseRequest): """Request handler for searching W3C credential records. diff --git a/aries_cloudagent/holder/tests/test_routes.py b/aries_cloudagent/holder/tests/test_routes.py index 88323ce8e0..be0f941d07 100644 --- a/aries_cloudagent/holder/tests/test_routes.py +++ b/aries_cloudagent/holder/tests/test_routes.py @@ -1,15 +1,13 @@ import json +from unittest import IsolatedAsyncioTestCase from aries_cloudagent.tests import mock -from unittest import IsolatedAsyncioTestCase from ...core.in_memory import InMemoryProfile -from ...ledger.base import BaseLedger - from ...indy.holder import IndyHolder +from ...ledger.base import BaseLedger from ...storage.vc_holder.base import VCHolder from ...storage.vc_holder.vc_record import VCRecord - from .. import routes as test_module VC_RECORD = VCRecord( @@ -33,7 +31,11 @@ class TestHolderRoutes(IsolatedAsyncioTestCase): def setUp(self): - self.profile = InMemoryProfile.test_profile() + self.profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) self.context = self.profile.context setattr(self.context, "profile", self.profile) @@ -43,6 +45,7 @@ def setUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_credentials_get(self): diff --git a/aries_cloudagent/indy/credx/holder.py b/aries_cloudagent/indy/credx/holder.py index f2efc54318..7eda477671 100644 --- a/aries_cloudagent/indy/credx/holder.py +++ b/aries_cloudagent/indy/credx/holder.py @@ -4,7 +4,6 @@ import json import logging import re -import uuid from typing import Dict, Optional, Sequence, Tuple, Union from aries_askar import AskarError, AskarErrorCode @@ -17,6 +16,7 @@ Presentation, PresentCredentials, ) +from uuid_utils import uuid4 from ...askar.profile import AskarProfile from ...ledger.base import BaseLedger @@ -195,7 +195,7 @@ async def store_credential( f"Error parsing credential definition ID: {cred_def_id}" ) - credential_id = credential_id or str(uuid.uuid4()) + credential_id = credential_id or str(uuid4()) tags = { "schema_id": schema_id, "schema_issuer_did": schema_id_parts[1], diff --git a/aries_cloudagent/indy/models/tests/test_pres_preview.py b/aries_cloudagent/indy/models/tests/test_pres_preview.py index 8f1d94614d..fc03114f5a 100644 --- a/aries_cloudagent/indy/models/tests/test_pres_preview.py +++ b/aries_cloudagent/indy/models/tests/test_pres_preview.py @@ -5,7 +5,6 @@ from time import time from unittest import TestCase -from unittest import IsolatedAsyncioTestCase from aries_cloudagent.tests import mock from ....core.in_memory import 
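The `@tenant_authentication` decorator being added to every holder route lives in `aries_cloudagent/admin/decorators/auth.py`; its full rules (multitenant bearer tokens, insecure mode) are not shown in this diff. Grounded only in what the updated tests configure, an `admin.admin_api_key` setting checked against an `x-api-key` header, a decorator of this shape typically looks like the following hedged sketch (`api_key_auth` is a hypothetical name, deliberately distinct from the real decorator):

import functools

from aiohttp import web


def api_key_auth(handler):
    """Hypothetical sketch of the x-api-key check the tests above exercise."""

    @functools.wraps(handler)
    async def wrapper(request: web.BaseRequest):
        context = request["context"]  # AdminRequestContext, as in the routes
        expected = context.profile.settings.get("admin.admin_api_key")
        if expected and request.headers.get("x-api-key") != expected:
            raise web.HTTPUnauthorized(reason="Invalid or missing API key")
        return await handler(request)

    return wrapper

This also explains the test setup change below: `InMemoryProfile.test_profile` now carries the `admin.admin_api_key` setting and every mocked request sends the matching `x-api-key` header, so the decorated handlers pass authentication.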
InMemoryProfile @@ -350,8 +349,7 @@ def test_eq(self): assert pred_spec_a != pred_spec_b -@pytest.mark.indy -class TestIndyPresPreviewAsync(IsolatedAsyncioTestCase): +class TestIndyPresPreviewAsync: """Presentation preview tests""" @pytest.mark.asyncio @@ -503,7 +501,6 @@ async def test_satisfaction(self): assert not attr_spec.satisfies(pred_spec) -@pytest.mark.indy class TestIndyPresPreview(TestCase): """Presentation preview tests""" diff --git a/aries_cloudagent/indy/sdk/__init__.py b/aries_cloudagent/indy/sdk/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/aries_cloudagent/indy/sdk/error.py b/aries_cloudagent/indy/sdk/error.py deleted file mode 100644 index a79e0a6194..0000000000 --- a/aries_cloudagent/indy/sdk/error.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Indy error handling.""" - -from typing import Type - -from indy.error import IndyError - -from ...core.error import BaseError - - -class IndyErrorHandler: - """Trap IndyError and raise an appropriate LedgerError instead.""" - - def __init__(self, message: str = None, error_cls: Type[BaseError] = BaseError): - """Init the context manager.""" - self.error_cls = error_cls - self.message = message - - def __enter__(self): - """Enter the context manager.""" - return self - - def __exit__(self, err_type, err_value, err_traceback): - """Exit the context manager.""" - if isinstance(err_value, IndyError): - raise IndyErrorHandler.wrap_error( - err_value, self.message, self.error_cls - ) from err_value - - @classmethod - def wrap_error( - cls, - err_value: IndyError, - message: str = None, - error_cls: Type[BaseError] = BaseError, - ) -> BaseError: - """Create an instance of BaseError from an IndyError.""" - err_msg = message or "Exception while performing indy operation" - indy_message = hasattr(err_value, "message") and err_value.message - if indy_message: - err_msg += f": {indy_message}" - err = error_cls(err_msg) - err.__traceback__ = err_value.__traceback__ - return err diff --git a/aries_cloudagent/indy/sdk/holder.py b/aries_cloudagent/indy/sdk/holder.py deleted file mode 100644 index 77e8f54256..0000000000 --- a/aries_cloudagent/indy/sdk/holder.py +++ /dev/null @@ -1,468 +0,0 @@ -"""Indy SDK holder implementation.""" - -import json -import logging -import re -from collections import OrderedDict -from typing import Optional, Sequence, Tuple, Union - -import indy.anoncreds -from indy.error import ErrorCode, IndyError - -from ...indy.sdk.wallet_setup import IndyOpenWallet -from ...ledger.base import BaseLedger -from ...storage.error import StorageError, StorageNotFoundError -from ...storage.indy import IndySdkStorage -from ...storage.record import StorageRecord -from ...wallet.error import WalletNotFoundError -from ..holder import IndyHolder, IndyHolderError -from .error import IndyErrorHandler -from .util import create_tails_reader - -LOGGER = logging.getLogger(__name__) - - -class IndySdkHolder(IndyHolder): - """Indy-SDK holder implementation.""" - - def __init__(self, wallet: IndyOpenWallet): - """Initialize an IndyHolder instance. - - Args: - wallet: IndyOpenWallet instance - - """ - self.wallet = wallet - - async def create_credential_request( - self, credential_offer: dict, credential_definition: dict, holder_did: str - ) -> Tuple[str, str]: - """Create a credential request for the given credential offer. 
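The deleted `IndyErrorHandler` above is a small trap-and-wrap context manager: on exit it catches the backend's exception and re-raises it as a domain error with the original attached as `__cause__`. The surviving Askar code paths keep the same shape around `AskarError` (imported in `indy/credx/holder.py` above). A generic, self-contained sketch of the pattern, with stand-in class names:

from typing import Type


class BaseError(Exception):
    """Stand-in for aries_cloudagent.core.error.BaseError."""


class BackendError(Exception):
    """Stand-in for the wrapped backend exception (e.g. AskarError)."""


class ErrorTrap:
    """Trap a backend error and re-raise it as a domain error."""

    def __init__(self, message: str, error_cls: Type[BaseError] = BaseError):
        self.message = message
        self.error_cls = error_cls

    def __enter__(self):
        return self

    def __exit__(self, err_type, err_value, err_tb):
        if isinstance(err_value, BackendError):
            raise self.error_cls(f"{self.message}: {err_value}") from err_value
        return False  # any other exception propagates unchanged


# The caller sees only the domain error type, with the backend error chained.
try:
    with ErrorTrap("Error when storing credential"):
        raise BackendError("wallet closed")
except BaseError as err:
    assert isinstance(err.__cause__, BackendError)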
- - Args: - credential_offer: The credential offer to create request for - credential_definition: The credential definition to create an offer for - holder_did: the DID of the agent making the request - - Returns: - A tuple of the credential request and credential request metadata - - """ - - with IndyErrorHandler( - "Error when creating credential request", IndyHolderError - ): - ( - credential_request_json, - credential_request_metadata_json, - ) = await indy.anoncreds.prover_create_credential_req( - self.wallet.handle, - holder_did, - json.dumps(credential_offer), - json.dumps(credential_definition), - self.wallet.master_secret_id, - ) - - LOGGER.debug( - "Created credential request. " - "credential_request_json=%s credential_request_metadata_json=%s", - credential_request_json, - credential_request_metadata_json, - ) - - return credential_request_json, credential_request_metadata_json - - async def store_credential( - self, - credential_definition: dict, - credential_data: dict, - credential_request_metadata: dict, - credential_attr_mime_types=None, - credential_id: str = None, - rev_reg_def: dict = None, - ) -> str: - """Store a credential in the wallet. - - Args: - credential_definition: Credential definition for this credential - credential_data: Credential data generated by the issuer - credential_request_metadata: credential request metadata generated - by the issuer - credential_attr_mime_types: dict mapping attribute names to (optional) - MIME types to store as non-secret record, if specified - credential_id: optionally override the stored credential id - rev_reg_def: revocation registry definition in json - - Returns: - the ID of the stored credential - - """ - with IndyErrorHandler( - "Error when storing credential in wallet", IndyHolderError - ): - credential_id = await indy.anoncreds.prover_store_credential( - wallet_handle=self.wallet.handle, - cred_id=credential_id, - cred_req_metadata_json=json.dumps(credential_request_metadata), - cred_json=json.dumps(credential_data), - cred_def_json=json.dumps(credential_definition), - rev_reg_def_json=json.dumps(rev_reg_def) if rev_reg_def else None, - ) - - if credential_attr_mime_types: - mime_types = { - attr: credential_attr_mime_types.get(attr) - for attr in credential_data["values"] - if attr in credential_attr_mime_types - } - if mime_types: - record = StorageRecord( - type=IndyHolder.RECORD_TYPE_MIME_TYPES, - value=credential_id, - tags=mime_types, - id=f"{IndyHolder.RECORD_TYPE_MIME_TYPES}::{credential_id}", - ) - indy_stor = IndySdkStorage(self.wallet) - await indy_stor.add_record(record) - - return credential_id - - async def get_credentials(self, start: int, count: int, wql: dict): - """Get credentials stored in the wallet. 
- - Args: - start: Starting index - count: Number of records to return - wql: wql query dict - - """ - - async def fetch(limit): - """Fetch up to limit (default smaller of all remaining or 256) creds.""" - creds = [] - CHUNK = min(record_count, limit or record_count, IndyHolder.CHUNK) - cardinality = min(limit or record_count, record_count) - - with IndyErrorHandler( - "Error fetching credentials from wallet", IndyHolderError - ): - while len(creds) < cardinality: - batch = json.loads( - await indy.anoncreds.prover_fetch_credentials( - search_handle, CHUNK - ) - ) - creds.extend(batch) - if len(batch) < CHUNK: - break - return creds - - with IndyErrorHandler( - "Error when constructing wallet credential query", IndyHolderError - ): - ( - search_handle, - record_count, - ) = await indy.anoncreds.prover_search_credentials( - self.wallet.handle, json.dumps(wql) - ) - - if start > 0: - # must move database cursor manually - await fetch(start) - credentials = await fetch(count) - - await indy.anoncreds.prover_close_credentials_search(search_handle) - - return credentials - - async def get_credentials_for_presentation_request_by_referent( - self, - presentation_request: dict, - referents: Sequence[str], - start: int, - count: int, - extra_query: Optional[dict] = None, - ): - """Get credentials stored in the wallet. - - Args: - presentation_request: Valid presentation request from issuer - referents: Presentation request referents to use to search for creds - start: Starting index - count: Maximum number of records to return - extra_query: wql query dict - - """ - - async def fetch(reft, limit): - """Fetch up to limit (default smaller of all remaining or 256) creds.""" - creds = [] - CHUNK = min(IndyHolder.CHUNK, limit or IndyHolder.CHUNK) - - with IndyErrorHandler( - "Error fetching credentials from wallet for presentation request", - IndyHolderError, - ): - while not limit or len(creds) < limit: - batch = json.loads( - await indy.anoncreds.prover_fetch_credentials_for_proof_req( - search_handle, reft, CHUNK - ) - ) - creds.extend(batch) - if len(batch) < CHUNK: - break - return creds - - with IndyErrorHandler( - "Error when constructing wallet credential query", IndyHolderError - ): - search_handle = ( - await ( - indy.anoncreds.prover_search_credentials_for_proof_req( - self.wallet.handle, - json.dumps(presentation_request), - json.dumps(extra_query), - ) - ) - ) - - if not referents: - referents = ( - *presentation_request["requested_attributes"], - *presentation_request["requested_predicates"], - ) - creds_dict = OrderedDict() - - try: - for reft in referents: - # must move database cursor manually - if start > 0: - await fetch(reft, start) - credentials = await fetch(reft, count) - - for cred in credentials: - cred_id = cred["cred_info"]["referent"] - if cred_id not in creds_dict: - cred["presentation_referents"] = {reft} - creds_dict[cred_id] = cred - else: - creds_dict[cred_id]["presentation_referents"].add(reft) - finally: - # Always close - await indy.anoncreds.prover_close_credentials_search_for_proof_req( - search_handle - ) - - for cred in creds_dict.values(): - cred["presentation_referents"] = list(cred["presentation_referents"]) - - creds_ordered = tuple( - sorted( - creds_dict.values(), - key=lambda c: ( - c["cred_info"]["rev_reg_id"] or "", # irrevocable 1st - c["cred_info"][ - "referent" - ], # should be descending by timestamp if we had it - ), - ) - )[:count] - return creds_ordered - - async def get_credential(self, credential_id: str) -> str: - """Get a credential stored in 
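The deleted `get_credentials` above drains an indy search handle in fixed-size chunks, stops on a short batch, and pages past `start` by fetching and discarding, since the cursor can only move forward. The same loop reduced to its cursor-draining core (a sketch, not the removed implementation; note the chunk size is clamped to the remaining limit, as the removed code did via its `CHUNK = min(...)`):

from typing import Callable, List


def drain(
    fetch_batch: Callable[[int], List[dict]], limit: int, chunk: int = 256
) -> List[dict]:
    """Fetch up to `limit` records (0 means all) in `chunk`-sized batches."""
    records: List[dict] = []
    while not limit or len(records) < limit:
        step = min(chunk, limit - len(records)) if limit else chunk
        batch = fetch_batch(step)
        records.extend(batch)
        if len(batch) < step:  # a short batch means the cursor is exhausted
            break
    return records


# Usage against a fake cursor: 600 records, fetch 300 after skipping 50.
source = iter([{"referent": str(i)} for i in range(600)])


def fetch_batch(n: int) -> List[dict]:
    return [rec for _, rec in zip(range(n), source)]


drain(fetch_batch, limit=50)  # move the cursor past `start` by discarding
page = drain(fetch_batch, limit=300)
assert page[0]["referent"] == "50" and len(page) == 300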
the wallet. - - Args: - credential_id: Credential id to retrieve - - """ - try: - credential_json = await indy.anoncreds.prover_get_credential( - self.wallet.handle, credential_id - ) - except IndyError as err: - if err.error_code == ErrorCode.WalletItemNotFound: - raise WalletNotFoundError( - "Credential {} not found in wallet {}".format( - credential_id, self.wallet.name - ) - ) - else: - raise IndyErrorHandler.wrap_error( - err, - f"Error when fetching credential {credential_id}", - IndyHolderError, - ) from err - - return credential_json - - async def credential_revoked( - self, ledger: BaseLedger, credential_id: str, fro: int = None, to: int = None - ) -> bool: - """Check ledger for revocation status of credential by cred id. - - Args: - credential_id: Credential id to check - - """ - cred = json.loads(await self.get_credential(credential_id)) - rev_reg_id = cred["rev_reg_id"] - - if rev_reg_id: - cred_rev_id = int(cred["cred_rev_id"]) - (rev_reg_delta, _) = await ledger.get_revoc_reg_delta( - rev_reg_id, - fro, - to, - ) - - return cred_rev_id in rev_reg_delta["value"].get("revoked", []) - else: - return False - - async def delete_credential(self, credential_id: str): - """Remove a credential stored in the wallet. - - Args: - credential_id: Credential id to remove - - """ - try: - indy_stor = IndySdkStorage(self.wallet) - mime_types_record = await indy_stor.get_record( - IndyHolder.RECORD_TYPE_MIME_TYPES, - f"{IndyHolder.RECORD_TYPE_MIME_TYPES}::{credential_id}", - ) - await indy_stor.delete_record(mime_types_record) - except StorageNotFoundError: - pass # MIME types record not present: carry on - - try: - await indy.anoncreds.prover_delete_credential( - self.wallet.handle, credential_id - ) - except IndyError as err: - if err.error_code == ErrorCode.WalletItemNotFound: - raise WalletNotFoundError( - "Credential {} not found in wallet {}".format( - credential_id, self.wallet.name - ) - ) - else: - raise IndyErrorHandler.wrap_error( - err, "Error when deleting credential", IndyHolderError - ) from err - - async def get_mime_type( - self, credential_id: str, attr: str = None - ) -> Union[dict, str]: - """Get MIME type per attribute (or for all attributes). - - Args: - credential_id: credential id - attr: attribute of interest or omit for all - - Returns: Attribute MIME type or dict mapping attribute names to MIME types - attr_meta_json = all_meta.tags.get(attr) - - """ - try: - mime_types_record = await IndySdkStorage(self.wallet).get_record( - IndyHolder.RECORD_TYPE_MIME_TYPES, - f"{IndyHolder.RECORD_TYPE_MIME_TYPES}::{credential_id}", - ) - except StorageError: - return None # no MIME types: not an error - - return mime_types_record.tags.get(attr) if attr else mime_types_record.tags - - async def create_presentation( - self, - presentation_request: dict, - requested_credentials: dict, - schemas: dict, - credential_definitions: dict, - rev_states: dict = None, - ) -> str: - """Get credentials stored in the wallet. 
- - Args: - presentation_request: Valid indy format presentation request - requested_credentials: Indy format requested credentials - schemas: Indy formatted schemas JSON - credential_definitions: Indy formatted credential definitions JSON - rev_states: Indy format revocation states JSON - - """ - - for reft, spec in presentation_request.get("requested_attributes", {}).items(): - for r in spec.get("restrictions", []): - for k in r: - m = re.match("^attr::(.*)::value$", k) - if not m: - continue - - named_attrs = ( - [spec["name"]] if "name" in spec else spec.get("names", []) - ) - restricted_attr = m.group(1) - if m and restricted_attr not in named_attrs: # wrong attr: hopeless - LOGGER.error( - f"Presentation request {presentation_request['nonce']} " - f"requested attribute {reft} names {named_attrs} " - f"but restricts {restricted_attr} value" - ) - raise IndyHolderError( - f"Requested attribute {reft} names {named_attrs} " - f"but restricts {restricted_attr} value" - ) - - with IndyErrorHandler("Error when constructing proof", IndyHolderError): - presentation_json = await indy.anoncreds.prover_create_proof( - self.wallet.handle, - json.dumps(presentation_request), - json.dumps(requested_credentials), - self.wallet.master_secret_id, - json.dumps(schemas), - json.dumps(credential_definitions), - json.dumps(rev_states) if rev_states else "{}", - ) - - return presentation_json - - async def create_revocation_state( - self, - cred_rev_id: str, - rev_reg_def: dict, - rev_reg_delta: dict, - timestamp: int, - tails_file_path: str, - ) -> str: - """Create current revocation state for a received credential. - - Args: - cred_rev_id: credential revocation id in revocation registry - rev_reg_def: revocation registry definition - rev_reg_delta: revocation delta - timestamp: delta timestamp - - Returns: - the revocation state - - """ - - with IndyErrorHandler( - "Error when constructing revocation state", IndyHolderError - ): - tails_file_reader = await create_tails_reader(tails_file_path) - rev_state_json = await indy.anoncreds.create_revocation_state( - tails_file_reader, - rev_reg_def_json=json.dumps(rev_reg_def), - cred_rev_id=cred_rev_id, - rev_reg_delta_json=json.dumps(rev_reg_delta), - timestamp=timestamp, - ) - - return rev_state_json diff --git a/aries_cloudagent/indy/sdk/issuer.py b/aries_cloudagent/indy/sdk/issuer.py deleted file mode 100644 index 8ac62b8637..0000000000 --- a/aries_cloudagent/indy/sdk/issuer.py +++ /dev/null @@ -1,378 +0,0 @@ -"""Indy SDK issuer implementation.""" - -import json -import logging -from typing import Sequence, Tuple - -import indy.anoncreds -import indy.blob_storage -from indy.error import AnoncredsRevocationRegistryFullError, IndyError, ErrorCode - -from ...indy.sdk.profile import IndySdkProfile -from ...messaging.util import encode -from ...storage.error import StorageError - -from ..issuer import ( - IndyIssuer, - IndyIssuerError, - IndyIssuerRevocationRegistryFullError, - DEFAULT_CRED_DEF_TAG, - DEFAULT_SIGNATURE_TYPE, -) - -from .error import IndyErrorHandler -from .util import create_tails_reader, create_tails_writer - -LOGGER = logging.getLogger(__name__) - - -class IndySdkIssuer(IndyIssuer): - """Indy-SDK issuer implementation.""" - - def __init__(self, profile: IndySdkProfile): - """Initialize an IndyIssuer instance. 
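The deleted `create_presentation` above rejects proof requests whose `attr::<name>::value` restrictions constrain an attribute the referent never names, since no credential could ever satisfy such a request. That check, distilled into a standalone sketch with a generic error type in place of `IndyHolderError`:

import re

ATTR_VALUE_RESTRICTION = re.compile(r"^attr::(.*)::value$")


def check_value_restrictions(presentation_request: dict) -> None:
    """Raise if a value restriction names an attribute the referent omits."""
    for reft, spec in presentation_request.get("requested_attributes", {}).items():
        named = [spec["name"]] if "name" in spec else spec.get("names", [])
        for restriction in spec.get("restrictions", []):
            for key in restriction:
                match = ATTR_VALUE_RESTRICTION.match(key)
                if match and match.group(1) not in named:
                    raise ValueError(
                        f"Referent {reft} names {named} but restricts "
                        f"{match.group(1)} value"
                    )


bad_request = {
    "requested_attributes": {
        "20_legalname_uuid": {
            "name": "legalName",
            "restrictions": [{"attr::wrong::value": "Waffle Asteroid"}],
        }
    }
}
try:
    check_value_restrictions(bad_request)
except ValueError as err:
    assert "restricts wrong value" in str(err)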
- - Args: - profile: IndySdkProfile instance - - """ - self.profile = profile - - async def create_schema( - self, - origin_did: str, - schema_name: str, - schema_version: str, - attribute_names: Sequence[str], - ) -> Tuple[str, str]: - """Create a new credential schema. - - Args: - origin_did: the DID issuing the credential definition - schema_name: the schema name - schema_version: the schema version - attribute_names: a sequence of schema attribute names - - Returns: - A tuple of the schema ID and JSON - - """ - - with IndyErrorHandler("Error when creating schema", IndyIssuerError): - schema_id, schema_json = await indy.anoncreds.issuer_create_schema( - origin_did, - schema_name, - schema_version, - json.dumps(attribute_names), - ) - return (schema_id, schema_json) - - async def credential_definition_in_wallet( - self, credential_definition_id: str - ) -> bool: - """Check whether a given credential definition ID is present in the wallet. - - Args: - credential_definition_id: The credential definition ID to check - """ - try: - await indy.anoncreds.issuer_create_credential_offer( - self.profile.wallet.handle, credential_definition_id - ) - return True - except IndyError as err: - if err.error_code not in ( - ErrorCode.CommonInvalidStructure, - ErrorCode.WalletItemNotFound, - ): - raise IndyErrorHandler.wrap_error( - err, - "Error when checking wallet for credential definition", - IndyIssuerError, - ) from err - # recognized error signifies no such cred def in wallet: pass - return False - - async def create_and_store_credential_definition( - self, - origin_did: str, - schema: dict, - signature_type: str = None, - tag: str = None, - support_revocation: bool = False, - ) -> Tuple[str, str]: - """Create a new credential definition and store it in the wallet. - - Args: - origin_did: the DID issuing the credential definition - schema: the schema used as a basis - signature_type: the credential definition signature type (default 'CL') - tag: the credential definition tag - support_revocation: whether to enable revocation for this credential def - - Returns: - A tuple of the credential definition ID and JSON - - """ - - with IndyErrorHandler( - "Error when creating credential definition", IndyIssuerError - ): - ( - credential_definition_id, - credential_definition_json, - ) = await indy.anoncreds.issuer_create_and_store_credential_def( - self.profile.wallet.handle, - origin_did, - json.dumps(schema), - tag or DEFAULT_CRED_DEF_TAG, - signature_type or DEFAULT_SIGNATURE_TYPE, - json.dumps({"support_revocation": support_revocation}), - ) - return (credential_definition_id, credential_definition_json) - - async def create_credential_offer(self, credential_definition_id: str) -> str: - """Create a credential offer for the given credential definition id. - - Args: - credential_definition_id: The credential definition to create an offer for - - Returns: - The created credential offer - - """ - with IndyErrorHandler( - "Exception when creating credential offer", IndyIssuerError - ): - credential_offer_json = await indy.anoncreds.issuer_create_credential_offer( - self.profile.wallet.handle, credential_definition_id - ) - - return credential_offer_json - - async def create_credential( - self, - schema: dict, - credential_offer: dict, - credential_request: dict, - credential_values: dict, - rev_reg_id: str = None, - tails_file_path: str = None, - ) -> Tuple[str, str]: - """Create a credential. 
- - Args: - schema: Schema to create credential for - credential_offer: Credential Offer to create credential for - credential_request: Credential request to create credential for - credential_values: Values to go in credential - rev_reg_id: ID of the revocation registry - tails_file_path: Path to the local tails file - - Returns: - A tuple of created credential and revocation id - - """ - - encoded_values = {} - schema_attributes = schema["attrNames"] - for attribute in schema_attributes: - # Ensure every attribute present in schema to be set. - # Extraneous attribute names are ignored. - try: - credential_value = credential_values[attribute] - except KeyError: - raise IndyIssuerError( - "Provided credential values are missing a value " - + f"for the schema attribute '{attribute}'" - ) - - encoded_values[attribute] = {} - encoded_values[attribute]["raw"] = str(credential_value) - encoded_values[attribute]["encoded"] = encode(credential_value) - - tails_reader_handle = ( - await create_tails_reader(tails_file_path) - if tails_file_path is not None - else None - ) - - try: - ( - credential_json, - cred_rev_id, - _, # rev_reg_delta_json only for ISSUANCE_ON_DEMAND, excluded by design - ) = await indy.anoncreds.issuer_create_credential( - self.profile.wallet.handle, - json.dumps(credential_offer), - json.dumps(credential_request), - json.dumps(encoded_values), - rev_reg_id, - tails_reader_handle, - ) - except AnoncredsRevocationRegistryFullError: - LOGGER.warning( - "Revocation registry %s is full: cannot create credential", - rev_reg_id, - ) - raise IndyIssuerRevocationRegistryFullError( - f"Revocation registry {rev_reg_id} is full" - ) - except IndyError as err: - raise IndyErrorHandler.wrap_error( - err, "Error when issuing credential", IndyIssuerError - ) from err - except StorageError as err: - LOGGER.warning( - ( - "Created issuer cred rev record for " - "Could not store issuer cred rev record for " - "rev reg id %s, cred rev id %s: %s" - ), - rev_reg_id, - cred_rev_id, - err.roll_up, - ) - - return (credential_json, cred_rev_id) - - async def revoke_credentials( - self, - cred_def_id: str, - rev_reg_id: str, - tails_file_path: str, - cred_rev_ids: Sequence[str], - ) -> Tuple[str, Sequence[str]]: - """Revoke a set of credentials in a revocation registry. 
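The deleted `create_credential` above maps every schema attribute to a `{"raw": ..., "encoded": ...}` pair using `encode` from `aries_cloudagent/messaging/util.py`. The sketch below follows the commonly used anoncreds encoding convention, not necessarily ACA-Py's helper verbatim: values that parse as 32-bit integers encode as themselves, everything else as the big-endian integer form of the value's SHA-256 digest.

import hashlib

I32_BOUND = 2**31  # 32-bit integers are encoded as themselves


def encode(value) -> str:
    """Sketch of the standard anoncreds attribute encoding (hedged)."""
    try:
        as_int = int(str(value))
        if -I32_BOUND <= as_int < I32_BOUND:
            return str(as_int)
    except (ValueError, TypeError):
        pass
    digest = hashlib.sha256(str(value).encode("utf-8")).digest()
    return str(int.from_bytes(digest, "big"))


assert encode(100) == encode("100") == "100"        # small ints: identity
assert encode("Waffle Asteroid").isdigit()          # strings: hash as integer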
- - Args: - cred_def_id: ID of the credential definition - rev_reg_id: ID of the revocation registry - tails_file_path: path to the local tails file - cred_rev_ids: sequences of credential indexes in the revocation registry - - Returns: - Tuple with the combined revocation delta, list of cred rev ids not revoked - - """ - failed_crids = set() - tails_reader_handle = await create_tails_reader(tails_file_path) - - result_json = None - for cred_rev_id in set(cred_rev_ids): - with IndyErrorHandler( - "Exception when revoking credential", IndyIssuerError - ): - try: - delta_json = await indy.anoncreds.issuer_revoke_credential( - self.profile.wallet.handle, - tails_reader_handle, - rev_reg_id, - cred_rev_id, - ) - except IndyError as err: - if err.error_code == ErrorCode.AnoncredsInvalidUserRevocId: - LOGGER.error( - ( - "Abstaining from revoking credential on " - "rev reg id %s, cred rev id=%s: " - "already revoked or not yet issued" - ), - rev_reg_id, - cred_rev_id, - ) - else: - LOGGER.error( - IndyErrorHandler.wrap_error( - err, "Revocation error", IndyIssuerError - ).roll_up - ) - failed_crids.add(int(cred_rev_id)) - continue - except StorageError as err: - LOGGER.warning( - ( - "Revoked credential on rev reg id %s, cred rev id %s " - "without corresponding issuer cred rev record: %s" - ), - rev_reg_id, - cred_rev_id, - err.roll_up, - ) - # carry on with delta merge; record is best-effort - - if result_json: - result_json = await self.merge_revocation_registry_deltas( - result_json, delta_json - ) - else: - result_json = delta_json - - return (result_json, [str(rev_id) for rev_id in sorted(failed_crids)]) - - async def merge_revocation_registry_deltas( - self, fro_delta: str, to_delta: str - ) -> str: - """Merge revocation registry deltas. - - Args: - fro_delta: original delta in JSON format - to_delta: incoming delta in JSON format - - Returns: - Merged delta in JSON format - - """ - - return await indy.anoncreds.issuer_merge_revocation_registry_deltas( - fro_delta, to_delta - ) - - async def create_and_store_revocation_registry( - self, - origin_did: str, - cred_def_id: str, - revoc_def_type: str, - tag: str, - max_cred_num: int, - tails_base_path: str, - ) -> Tuple[str, str, str]: - """Create a new revocation registry and store it in the wallet. 
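The deleted `revoke_credentials` above records the credential revocation ids that fail, carries on with the rest, and folds each per-credential delta into a running result via `issuer_merge_revocation_registry_deltas`. The control flow, distilled into a sketch with a naive merge as a stand-in for the indy call:

from typing import Callable, Iterable, List, Optional, Set, Tuple


def merge_deltas(older: dict, newer: dict) -> dict:
    """Naive stand-in for indy's issuer_merge_revocation_registry_deltas."""
    revoked = set(older["value"].get("revoked", [])) | set(
        newer["value"].get("revoked", [])
    )
    return {"value": {"revoked": sorted(revoked)}}


def revoke_many(
    cred_rev_ids: Iterable[str], revoke_one: Callable[[str], dict]
) -> Tuple[Optional[dict], List[str]]:
    """Revoke each id once; return (merged delta, ids that failed)."""
    result: Optional[dict] = None
    failed: Set[int] = set()
    for cred_rev_id in set(cred_rev_ids):
        try:
            delta = revoke_one(cred_rev_id)
        except Exception:  # e.g. already revoked or not yet issued
            failed.add(int(cred_rev_id))
            continue
        result = delta if result is None else merge_deltas(result, delta)
    return result, [str(rev_id) for rev_id in sorted(failed)]


deltas = {"1": {"value": {"revoked": [1]}}, "2": {"value": {"revoked": [2]}}}
result, failed = revoke_many(["1", "2", "3"], lambda cr_id: deltas[cr_id])
assert result["value"]["revoked"] == [1, 2] and failed == ["3"]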
- - Args: - origin_did: the DID issuing the revocation registry - cred_def_id: the identifier of the related credential definition - revoc_def_type: the revocation registry type (default CL_ACCUM) - tag: the unique revocation registry tag - max_cred_num: the number of credentials supported in the registry - tails_base_path: where to store the tails file - - Returns: - A tuple of the revocation registry ID, JSON, and entry JSON - - """ - - tails_writer = await create_tails_writer(tails_base_path) - - with IndyErrorHandler( - "Exception when creating revocation registry", IndyIssuerError - ): - ( - rev_reg_id, - rev_reg_def_json, - rev_reg_entry_json, - ) = await indy.anoncreds.issuer_create_and_store_revoc_reg( - self.profile.wallet.handle, - origin_did, - revoc_def_type, - tag, - cred_def_id, - json.dumps( - { - "issuance_type": "ISSUANCE_BY_DEFAULT", - "max_cred_num": max_cred_num, - } - ), - tails_writer, - ) - return (rev_reg_id, rev_reg_def_json, rev_reg_entry_json) diff --git a/aries_cloudagent/indy/sdk/profile.py b/aries_cloudagent/indy/sdk/profile.py deleted file mode 100644 index 71b6530b38..0000000000 --- a/aries_cloudagent/indy/sdk/profile.py +++ /dev/null @@ -1,241 +0,0 @@ -"""Manage Indy-SDK profile interaction.""" - -import asyncio -import logging -from typing import Any, Mapping -import warnings -from weakref import finalize, ref - -from ...cache.base import BaseCache -from ...config.injection_context import InjectionContext -from ...config.provider import ClassProvider -from ...core.error import ProfileError -from ...core.profile import Profile, ProfileManager, ProfileSession -from ...ledger.base import BaseLedger -from ...ledger.indy import IndySdkLedger, IndySdkLedgerPool -from ...storage.base import BaseStorage, BaseStorageSearch -from ...storage.vc_holder.base import VCHolder -from ...utils.multi_ledger import get_write_ledger_config_for_profile -from ...wallet.base import BaseWallet -from ...wallet.indy import IndySdkWallet -from ..holder import IndyHolder -from ..issuer import IndyIssuer -from ..verifier import IndyVerifier -from .wallet_setup import IndyOpenWallet, IndyWalletConfig - -LOGGER = logging.getLogger(__name__) - - -class IndySdkProfile(Profile): - """Provide access to Indy profile interaction methods.""" - - BACKEND_NAME = "indy" - - def __init__( - self, - opened: IndyOpenWallet, - context: InjectionContext = None, - ): - """Create a new IndyProfile instance.""" - super().__init__(context=context, name=opened.name, created=opened.created) - self.opened = opened - self.ledger_pool: IndySdkLedgerPool = None - self.init_ledger_pool() - self.bind_providers() - self._finalizer = self._make_finalizer(opened) - - @property - def name(self) -> str: - """Accessor for the profile name.""" - return self.opened.name - - @property - def wallet(self) -> IndyOpenWallet: - """Accessor for the opened wallet instance.""" - return self.opened - - def init_ledger_pool(self): - """Initialize the ledger pool.""" - if self.settings.get("ledger.disabled"): - LOGGER.info("Ledger support is disabled") - return - - if self.settings.get("ledger.genesis_transactions"): - self.ledger_pool = self.context.inject(IndySdkLedgerPool, self.settings) - - def bind_providers(self): - """Initialize the profile-level instance providers.""" - injector = self._context.injector - - injector.bind_provider( - BaseStorageSearch, - ClassProvider("aries_cloudagent.storage.indy.IndySdkStorage", self.opened), - ) - - injector.bind_provider( - IndyHolder, - ClassProvider( - 
"aries_cloudagent.indy.sdk.holder.IndySdkHolder", self.opened - ), - ) - injector.bind_provider( - IndyIssuer, - ClassProvider("aries_cloudagent.indy.sdk.issuer.IndySdkIssuer", ref(self)), - ) - - injector.bind_provider( - VCHolder, - ClassProvider( - "aries_cloudagent.storage.vc_holder.indy.IndySdkVCHolder", self.opened - ), - ) - if ( - self.settings.get("ledger.ledger_config_list") - and len(self.settings.get("ledger.ledger_config_list")) >= 1 - ): - write_ledger_config = get_write_ledger_config_for_profile( - settings=self.settings - ) - cache = self.context.injector.inject_or(BaseCache) - injector.bind_provider( - BaseLedger, - ClassProvider( - IndySdkLedger, - IndySdkLedgerPool( - write_ledger_config.get("pool_name") - or write_ledger_config.get("id"), - keepalive=write_ledger_config.get("keepalive"), - cache=cache, - genesis_transactions=write_ledger_config.get( - "genesis_transactions" - ), - read_only=write_ledger_config.get("read_only"), - socks_proxy=write_ledger_config.get("socks_proxy"), - ), - ref(self), - ), - ) - self.settings["ledger.write_ledger"] = write_ledger_config.get("id") - if ( - "endorser_alias" in write_ledger_config - and "endorser_did" in write_ledger_config - ): - self.settings["endorser.endorser_alias"] = write_ledger_config.get( - "endorser_alias" - ) - self.settings["endorser.endorser_public_did"] = write_ledger_config.get( - "endorser_did" - ) - elif self.ledger_pool: - injector.bind_provider( - BaseLedger, ClassProvider(IndySdkLedger, self.ledger_pool, ref(self)) - ) - if self.ledger_pool or self.settings.get("ledger.ledger_config_list"): - injector.bind_provider( - IndyVerifier, - ClassProvider( - "aries_cloudagent.indy.sdk.verifier.IndySdkVerifier", - ref(self), - ), - ) - - def session(self, context: InjectionContext = None) -> "ProfileSession": - """Start a new interactive session with no transaction support requested.""" - return IndySdkProfileSession(self, context=context) - - def transaction(self, context: InjectionContext = None) -> "ProfileSession": - """Start a new interactive session with commit and rollback support. - - If the current backend does not support transactions, then commit - and rollback operations of the session will not have any effect. - """ - return IndySdkProfileSession(self, context=context) - - async def close(self): - """Close the profile instance.""" - if self.opened: - await self.opened.close() - self.opened = None - - def _make_finalizer(self, opened: IndyOpenWallet) -> finalize: - """Return a finalizer for this profile. - - See docs for weakref.finalize for more details on behavior of finalizers. 
- """ - - async def _closer(opened: IndyOpenWallet): - try: - await opened.close() - except Exception: - LOGGER.exception("Failed to close wallet from finalizer") - - def _finalize(opened: IndyOpenWallet): - LOGGER.debug("Profile finalizer called; closing wallet") - asyncio.get_event_loop().create_task(_closer(opened)) - - return finalize(self, _finalize, opened) - - async def remove(self): - """Remove the profile associated with this instance.""" - if not self.opened: - raise ProfileError("Wallet must be opened to remove profile") - - self.opened.config.auto_remove = True - await self.close() - - -class IndySdkProfileSession(ProfileSession): - """An active connection to the profile management backend.""" - - def __init__( - self, - profile: Profile, - *, - context: InjectionContext = None, - settings: Mapping[str, Any] = None - ): - """Create a new IndySdkProfileSession instance.""" - super().__init__(profile=profile, context=context, settings=settings) - - async def _setup(self): - """Create the session or transaction connection, if needed.""" - injector = self._context.injector - injector.bind_provider( - BaseWallet, ClassProvider(IndySdkWallet, self.profile.opened) - ) - injector.bind_provider( - BaseStorage, - ClassProvider( - "aries_cloudagent.storage.indy.IndySdkStorage", self.profile.opened - ), - ) - - -class IndySdkProfileManager(ProfileManager): - """Manager for Indy-SDK wallets.""" - - async def provision( - self, context: InjectionContext, config: Mapping[str, Any] = None - ) -> Profile: - """Provision a new instance of a profile.""" - indy_config = IndyWalletConfig(config) - opened = await indy_config.create_wallet() - return IndySdkProfile(opened, context) - - async def open( - self, context: InjectionContext, config: Mapping[str, Any] = None - ) -> Profile: - """Open an instance of an existing profile.""" - warnings.warn( - "Indy wallet type is deprecated, use Askar instead; see: " - "https://aca-py.org/main/deploying/IndySDKtoAskarMigration/", - DeprecationWarning, - ) - LOGGER.warning( - "Indy wallet type is deprecated, use Askar instead; see: " - "https://aca-py.org/main/deploying/IndySDKtoAskarMigration/", - ) - - indy_config = IndyWalletConfig(config) - opened = await indy_config.open_wallet() - return IndySdkProfile(opened, context) diff --git a/aries_cloudagent/indy/sdk/tests/__init__.py b/aries_cloudagent/indy/sdk/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/aries_cloudagent/indy/sdk/tests/test_holder.py b/aries_cloudagent/indy/sdk/tests/test_holder.py deleted file mode 100644 index f9a3084958..0000000000 --- a/aries_cloudagent/indy/sdk/tests/test_holder.py +++ /dev/null @@ -1,603 +0,0 @@ -import json -import pytest - -from aries_cloudagent.tests import mock -from unittest import IsolatedAsyncioTestCase - -import indy.anoncreds - -from indy.error import IndyError, ErrorCode - -from ...holder import IndyHolder, IndyHolderError - -from .. 
import holder as test_module - - -@pytest.mark.indy -class TestIndySdkHolder(IsolatedAsyncioTestCase): - def setUp(self): - mock_ledger = mock.MagicMock( - get_credential_definition=mock.MagicMock(return_value={"value": {}}), - get_revoc_reg_delta=mock.CoroutineMock( - return_value=( - {"value": {"...": "..."}}, - 1234567890, - ) - ), - ) - mock_ledger.__aenter__ = mock.CoroutineMock(return_value=mock_ledger) - self.ledger = mock_ledger - self.wallet = mock.MagicMock() - - self.holder = test_module.IndySdkHolder(self.wallet) - assert "IndySdkHolder" in str(self.holder) - - @mock.patch("indy.anoncreds.prover_create_credential_req") - async def test_create_credential_request(self, mock_create_credential_req): - mock_create_credential_req.return_value = ("{}", "[]") - - cred_req_json, cred_req_meta_json = await self.holder.create_credential_request( - "credential_offer", "credential_definition", "did" - ) - - mock_create_credential_req.assert_called_once_with( - self.wallet.handle, - "did", - json.dumps("credential_offer"), - json.dumps("credential_definition"), - self.wallet.master_secret_id, - ) - - assert (json.loads(cred_req_json), json.loads(cred_req_meta_json)) == ({}, []) - - @mock.patch("indy.anoncreds.prover_store_credential") - async def test_store_credential(self, mock_store_cred): - mock_store_cred.return_value = "cred_id" - - cred_id = await self.holder.store_credential( - "credential_definition", "credential_data", "credential_request_metadata" - ) - - mock_store_cred.assert_called_once_with( - wallet_handle=self.wallet.handle, - cred_id=None, - cred_req_metadata_json=json.dumps("credential_request_metadata"), - cred_json=json.dumps("credential_data"), - cred_def_json=json.dumps("credential_definition"), - rev_reg_def_json=None, - ) - - assert cred_id == "cred_id" - - @mock.patch("indy.anoncreds.prover_store_credential") - async def test_store_credential_with_mime_types(self, mock_store_cred): - with mock.patch.object( - test_module, "IndySdkStorage", mock.MagicMock() - ) as mock_storage: - mock_storage.return_value = mock.MagicMock(add_record=mock.CoroutineMock()) - - mock_store_cred.return_value = "cred_id" - - CRED_DATA = {"values": {"cameo": "d29yZCB1cA=="}} - cred_id = await self.holder.store_credential( - "credential_definition", - CRED_DATA, - "credential_request_metadata", - {"cameo": "image/png"}, - ) - - mock_store_cred.assert_called_once_with( - wallet_handle=self.wallet.handle, - cred_id=None, - cred_req_metadata_json=json.dumps("credential_request_metadata"), - cred_json=json.dumps(CRED_DATA), - cred_def_json=json.dumps("credential_definition"), - rev_reg_def_json=None, - ) - mock_storage.return_value.add_record.assert_called_once() - - assert cred_id == "cred_id" - - @mock.patch("indy.non_secrets.get_wallet_record") - async def test_get_credential_attrs_mime_types(self, mock_nonsec_get_wallet_record): - cred_id = "credential_id" - dummy_tags = {"a": "1", "b": "2"} - dummy_rec = { - "type": IndyHolder.RECORD_TYPE_MIME_TYPES, - "id": cred_id, - "value": "value", - "tags": dummy_tags, - } - mock_nonsec_get_wallet_record.return_value = json.dumps(dummy_rec) - - mime_types = await self.holder.get_mime_type(cred_id) - - mock_nonsec_get_wallet_record.assert_called_once_with( - self.wallet.handle, - dummy_rec["type"], - f"{IndyHolder.RECORD_TYPE_MIME_TYPES}::{dummy_rec['id']}", - json.dumps( - {"retrieveType": False, "retrieveValue": True, "retrieveTags": True} - ), - ) - - assert mime_types == dummy_tags - - @mock.patch("indy.non_secrets.get_wallet_record") - async def 
test_get_credential_attr_mime_type(self, mock_nonsec_get_wallet_record): - cred_id = "credential_id" - dummy_tags = {"a": "1", "b": "2"} - dummy_rec = { - "type": IndyHolder.RECORD_TYPE_MIME_TYPES, - "id": cred_id, - "value": "value", - "tags": dummy_tags, - } - mock_nonsec_get_wallet_record.return_value = json.dumps(dummy_rec) - - a_mime_type = await self.holder.get_mime_type(cred_id, "a") - - mock_nonsec_get_wallet_record.assert_called_once_with( - self.wallet.handle, - dummy_rec["type"], - f"{IndyHolder.RECORD_TYPE_MIME_TYPES}::{dummy_rec['id']}", - json.dumps( - {"retrieveType": False, "retrieveValue": True, "retrieveTags": True} - ), - ) - - assert a_mime_type == dummy_tags["a"] - - @mock.patch("indy.non_secrets.get_wallet_record") - async def test_get_credential_attr_mime_type_x(self, mock_nonsec_get_wallet_record): - cred_id = "credential_id" - dummy_tags = {"a": "1", "b": "2"} - dummy_rec = { - "type": IndyHolder.RECORD_TYPE_MIME_TYPES, - "id": cred_id, - "value": "value", - "tags": dummy_tags, - } - mock_nonsec_get_wallet_record.side_effect = test_module.StorageError() - - assert await self.holder.get_mime_type(cred_id, "a") is None - - @mock.patch("indy.anoncreds.prover_search_credentials") - @mock.patch("indy.anoncreds.prover_fetch_credentials") - @mock.patch("indy.anoncreds.prover_close_credentials_search") - async def test_get_credentials( - self, mock_close_cred_search, mock_fetch_credentials, mock_search_credentials - ): - SIZE = 300 - mock_search_credentials.return_value = ("search_handle", 350) - mock_fetch_credentials.side_effect = [ - json.dumps([0] * test_module.IndySdkHolder.CHUNK), - json.dumps([1] * (SIZE % test_module.IndySdkHolder.CHUNK)), - ] - - credentials = await self.holder.get_credentials(0, SIZE, {}) - mock_search_credentials.assert_called_once_with( - self.wallet.handle, json.dumps({}) - ) - - assert mock_fetch_credentials.call_count == 2 - mock_close_cred_search.assert_called_once_with("search_handle") - - assert len(credentials) == SIZE - - mock_fetch_credentials.side_effect = [ - json.dumps([0] * test_module.IndySdkHolder.CHUNK), - json.dumps([1] * (SIZE % test_module.IndySdkHolder.CHUNK)), - ] - credentials = await self.holder.get_credentials(0, 0, {}) # 0 defaults to all - assert len(credentials) == SIZE - - @mock.patch("indy.anoncreds.prover_search_credentials") - @mock.patch("indy.anoncreds.prover_fetch_credentials") - @mock.patch("indy.anoncreds.prover_close_credentials_search") - async def test_get_credentials_seek( - self, mock_close_cred_search, mock_fetch_credentials, mock_search_credentials - ): - mock_search_credentials.return_value = ("search_handle", 3) - mock_fetch_credentials.return_value = "[1,2,3]" - - credentials = await self.holder.get_credentials(2, 3, {}) - assert mock_fetch_credentials.call_args_list == [ - (("search_handle", 2),), - (("search_handle", 3),), - ] - - @mock.patch("indy.anoncreds.prover_search_credentials_for_proof_req") - @mock.patch("indy.anoncreds.prover_fetch_credentials_for_proof_req") - @mock.patch("indy.anoncreds.prover_close_credentials_search_for_proof_req") - async def test_get_credentials_for_presentation_request_by_reft( - self, - mock_prover_close_credentials_search_for_proof_req, - mock_prover_fetch_credentials_for_proof_req, - mock_prover_search_credentials_for_proof_req, - ): - SIZE = 300 - SKIP = 50 - mock_prover_search_credentials_for_proof_req.return_value = "search_handle" - mock_prover_fetch_credentials_for_proof_req.side_effect = [ - json.dumps( - [ - {"cred_info": {"referent": f"skip-{i}", 
"rev_reg_id": None}} - for i in range(SKIP) - ] - ), - json.dumps( - [ - { - "cred_info": { - "referent": f"reft-{i}", - "rev_reg_id": None if i % 2 else "dummy-rrid", - } - } - for i in range(test_module.IndyHolder.CHUNK) - ] - ), - json.dumps( - [ - { - "cred_info": { - "referent": f"reft-{test_module.IndyHolder.CHUNK + i}", - "rev_reg_id": None, - } - } - for i in range(SIZE % test_module.IndyHolder.CHUNK) - ] - ), - ] - - PROOF_REQ = { - "requested_attributes": {"attr_0_uuid": {"...": "..."}}, - "requested_predicates": {"pred_0_uuid": {"...": "..."}}, - } - credentials = ( - await self.holder.get_credentials_for_presentation_request_by_referent( - PROOF_REQ, - ("asdb",), - 50, - SIZE, - {"extra": "query"}, - ) - ) - - mock_prover_search_credentials_for_proof_req.assert_called_once_with( - self.wallet.handle, - json.dumps(PROOF_REQ), - json.dumps({"extra": "query"}), - ) - - assert mock_prover_fetch_credentials_for_proof_req.call_count == 3 - mock_prover_close_credentials_search_for_proof_req.assert_called_once_with( - "search_handle" - ) - - assert len(credentials) == SIZE - assert all( - not c["cred_info"]["rev_reg_id"] - for c in credentials[ - 0 : len(credentials) - (test_module.IndyHolder.CHUNK // 2) - ] - ) # irrevocable first - assert all( - c["cred_info"]["rev_reg_id"] - for c in credentials[-test_module.IndyHolder.CHUNK // 2 :] - ) # revocable last - - @mock.patch("indy.anoncreds.prover_search_credentials_for_proof_req") - @mock.patch("indy.anoncreds.prover_fetch_credentials_for_proof_req") - @mock.patch("indy.anoncreds.prover_close_credentials_search_for_proof_req") - async def test_get_credentials_for_presentation_request_by_referent_default_refts( - self, - mock_prover_close_credentials_search_for_proof_req, - mock_prover_fetch_credentials_for_proof_req, - mock_prover_search_credentials_for_proof_req, - ): - mock_prover_search_credentials_for_proof_req.return_value = "search_handle" - mock_prover_fetch_credentials_for_proof_req.return_value = json.dumps( - [{"cred_info": {"referent": "asdb", "rev_reg_id": None}}] - ) - - PRES_REQ = { - "requested_attributes": { - "0_a_uuid": {"...": "..."}, - "1_b_uuid": {"...": "..."}, - }, - "requested_predicates": {"2_c_ge_80": {"...": "..."}}, - } - - credentials = ( - await self.holder.get_credentials_for_presentation_request_by_referent( - PRES_REQ, - None, - 2, - 3, - ) - ) - - mock_prover_search_credentials_for_proof_req.assert_called_once_with( - self.wallet.handle, json.dumps(PRES_REQ), json.dumps({}) - ) - - @mock.patch("indy.anoncreds.prover_get_credential") - async def test_get_credential(self, mock_get_cred): - mock_get_cred.return_value = "{}" - credential_json = await self.holder.get_credential("credential_id") - mock_get_cred.assert_called_once_with(self.wallet.handle, "credential_id") - - assert json.loads(credential_json) == {} - - @mock.patch("indy.anoncreds.prover_get_credential") - async def test_get_credential_not_found(self, mock_get_cred): - mock_get_cred.side_effect = IndyError(error_code=ErrorCode.WalletItemNotFound) - with self.assertRaises(test_module.WalletNotFoundError): - await self.holder.get_credential("credential_id") - - @mock.patch("indy.anoncreds.prover_get_credential") - async def test_get_credential_x(self, mock_get_cred): - mock_get_cred.side_effect = IndyError("unexpected failure") - - with self.assertRaises(test_module.IndyHolderError): - await self.holder.get_credential("credential_id") - - async def test_credential_revoked(self): - with mock.patch.object( # no creds revoked - self.holder, 
"get_credential", mock.CoroutineMock() - ) as mock_get_cred: - mock_get_cred.return_value = json.dumps( - { - "rev_reg_id": "dummy-rrid", - "cred_rev_id": "123", - "...": "...", - } - ) - result = await self.holder.credential_revoked(self.ledger, "credential_id") - assert not result - - with mock.patch.object( # cred not revocable - self.holder, "get_credential", mock.CoroutineMock() - ) as mock_get_cred: - mock_get_cred.return_value = json.dumps( - { - "rev_reg_id": None, - "cred_rev_id": None, - "...": "...", - } - ) - result = await self.holder.credential_revoked(self.ledger, "credential_id") - assert not result - - self.ledger.get_revoc_reg_delta = mock.CoroutineMock( - return_value=( - { - "value": { - "revoked": [1, 2, 3], - "...": "...", - } - }, - 1234567890, - ) - ) - with mock.patch.object( # cred not revoked - self.holder, "get_credential", mock.CoroutineMock() - ) as mock_get_cred: - mock_get_cred.return_value = json.dumps( - { - "rev_reg_id": "dummy-rrid", - "cred_rev_id": "123", - "...": "...", - } - ) - result = await self.holder.credential_revoked(self.ledger, "credential_id") - assert not result - - with mock.patch.object( # cred revoked - self.holder, "get_credential", mock.CoroutineMock() - ) as mock_get_cred: - mock_get_cred.return_value = json.dumps( - { - "rev_reg_id": "dummy-rrid", - "cred_rev_id": "2", - "...": "...", - } - ) - result = await self.holder.credential_revoked(self.ledger, "credential_id") - assert result - - @mock.patch("indy.anoncreds.prover_delete_credential") - @mock.patch("indy.non_secrets.get_wallet_record") - @mock.patch("indy.non_secrets.delete_wallet_record") - async def test_delete_credential( - self, - mock_nonsec_del_wallet_record, - mock_nonsec_get_wallet_record, - mock_prover_del_cred, - ): - mock_nonsec_get_wallet_record.return_value = json.dumps( - { - "type": "typ", - "id": "ident", - "value": "value", - "tags": {"a": json.dumps("1"), "b": json.dumps("2")}, - } - ) - - credential = await self.holder.delete_credential("credential_id") - - mock_prover_del_cred.assert_called_once_with( - self.wallet.handle, "credential_id" - ) - - @mock.patch("indy.anoncreds.prover_delete_credential") - @mock.patch("indy.non_secrets.get_wallet_record") - @mock.patch("indy.non_secrets.delete_wallet_record") - async def test_delete_credential_x( - self, - mock_nonsec_del_wallet_record, - mock_nonsec_get_wallet_record, - mock_prover_del_cred, - ): - mock_nonsec_get_wallet_record.side_effect = test_module.StorageNotFoundError() - mock_prover_del_cred.side_effect = IndyError( - error_code=ErrorCode.WalletItemNotFound - ) - - with self.assertRaises(test_module.WalletNotFoundError): - await self.holder.delete_credential("credential_id") - mock_prover_del_cred.assert_called_once_with( - self.wallet.handle, "credential_id" - ) - - mock_prover_del_cred.side_effect = IndyError( - error_code=ErrorCode.CommonInvalidParam1 - ) - with self.assertRaises(test_module.IndyHolderError): - await self.holder.delete_credential("credential_id") - assert mock_prover_del_cred.call_count == 2 - - @mock.patch("indy.anoncreds.prover_create_proof") - async def test_create_presentation(self, mock_create_proof): - mock_create_proof.return_value = "{}" - PROOF_REQ = { - "nonce": "1554990836", - "name": "proof_req", - "version": "0.0", - "requested_attributes": { - "20_legalname_uuid": { - "name": "legalName", - "restrictions": [ - {"cred_def_id": "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag"} - ], - } - }, - "requested_predicates": { - "21_jurisdictionid_GE_uuid": { - "name": "jurisdictionId", - 
"p_type": ">=", - "p_value": 1, - "restrictions": [ - {"cred_def_id": "WgWxqztrNooG92RXvxSTWv:3:CL:21:tag"} - ], - } - }, - } - - presentation_json = await self.holder.create_presentation( - PROOF_REQ, - "requested_credentials", - "schemas", - "credential_definitions", - ) - - mock_create_proof.assert_called_once_with( - self.wallet.handle, - json.dumps(PROOF_REQ), - json.dumps("requested_credentials"), - self.wallet.master_secret_id, - json.dumps("schemas"), - json.dumps("credential_definitions"), - json.dumps({}), - ) - - assert json.loads(presentation_json) == {} - - async def test_create_presentation_restr_attr_mismatch_x(self): - PROOF_REQS = [ - { - "nonce": "1554990836", - "name": "proof_req", - "version": "0.0", - "requested_attributes": { - "20_legalname_uuid": { - "name": "legalName", - "restrictions": [ - { - "cred_def_id": "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag", - "attr::wrong::value": "Waffle Asteroid", - } - ], - } - }, - "requested_predicates": { - "21_jurisdictionid_GE_uuid": { - "name": "jurisdictionId", - "p_type": ">=", - "p_value": 1, - "restrictions": [ - {"cred_def_id": "WgWxqztrNooG92RXvxSTWv:3:CL:21:tag"} - ], - } - }, - }, - { - "nonce": "1554990836", - "name": "proof_req", - "version": "0.0", - "requested_attributes": { - "20_legalname_uuid": { - "names": ["legalName", "businessLang"], - "restrictions": [ - { - "cred_def_id": "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag", - "attr::wrong::value": "Waffle Asteroid", - } - ], - } - }, - "requested_predicates": { - "21_jurisdictionid_GE_uuid": { - "name": "jurisdictionId", - "p_type": ">=", - "p_value": 1, - "restrictions": [ - {"cred_def_id": "WgWxqztrNooG92RXvxSTWv:3:CL:21:tag"} - ], - } - }, - }, - ] - - for proof_req in PROOF_REQS: - with self.assertRaises(IndyHolderError): - await self.holder.create_presentation( - proof_req, - "requested_credentials", - "schemas", - "credential_definitions", - ) - - async def test_create_revocation_state(self): - rr_state = { - "witness": {"omega": "1 ..."}, - "rev_reg": {"accum": "21 ..."}, - "timestamp": 1234567890, - } - - with mock.patch.object( - test_module, "create_tails_reader", mock.CoroutineMock() - ) as mock_create_tails_reader, mock.patch.object( - indy.anoncreds, "create_revocation_state", mock.CoroutineMock() - ) as mock_create_rr_state: - mock_create_rr_state.return_value = json.dumps(rr_state) - - cred_rev_id = "1" - rev_reg_def = {"def": 1} - rev_reg_delta = {"delta": 1} - timestamp = 1234567890 - tails_path = "/tmp/some.tails" - - result = await self.holder.create_revocation_state( - cred_rev_id, rev_reg_def, rev_reg_delta, timestamp, tails_path - ) - assert json.loads(result) == rr_state - - mock_create_rr_state.assert_awaited_once_with( - mock_create_tails_reader.return_value, - rev_reg_def_json=json.dumps(rev_reg_def), - cred_rev_id=cred_rev_id, - rev_reg_delta_json=json.dumps(rev_reg_delta), - timestamp=timestamp, - ) diff --git a/aries_cloudagent/indy/sdk/tests/test_issuer.py b/aries_cloudagent/indy/sdk/tests/test_issuer.py deleted file mode 100644 index d0946a65d7..0000000000 --- a/aries_cloudagent/indy/sdk/tests/test_issuer.py +++ /dev/null @@ -1,398 +0,0 @@ -import json -import pytest - -from unittest import mock -from unittest import IsolatedAsyncioTestCase - -from indy.error import ( - AnoncredsRevocationRegistryFullError, - ErrorCode, - IndyError, - WalletItemNotFound, -) - -from ....config.injection_context import InjectionContext -from ....indy.sdk.profile import IndySdkProfile -from ....indy.sdk.wallet_setup import IndyWalletConfig -from ....wallet.indy 
import IndySdkWallet -from ....ledger.indy import IndySdkLedgerPool - -from ...issuer import IndyIssuerRevocationRegistryFullError - -from .. import issuer as test_module - - -TEST_DID = "55GkHamhTU1ZbTbV2ab9DE" -SCHEMA_NAME = "resident" -SCHEMA_VERSION = "1.0" -SCHEMA_TXN = 1234 -SCHEMA_ID = f"{TEST_DID}:2:{SCHEMA_NAME}:{SCHEMA_VERSION}" -CRED_DEF_ID = f"{TEST_DID}:3:CL:{SCHEMA_TXN}:default" -REV_REG_ID = f"{TEST_DID}:4:{CRED_DEF_ID}:CL_ACCUM:0" -TEST_RR_DELTA = { - "ver": "1.0", - "value": {"prevAccum": "1 ...", "accum": "21 ...", "issued": [1, 2, 12, 42]}, -} - - -@pytest.mark.indy -class TestIndySdkIssuer(IsolatedAsyncioTestCase): - async def asyncSetUp(self): - self.context = InjectionContext() - self.context.injector.bind_instance( - IndySdkLedgerPool, IndySdkLedgerPool("name") - ) - - self.wallet = await IndyWalletConfig( - { - "auto_recreate": True, - "auto_remove": True, - "key": await IndySdkWallet.generate_wallet_key(), - "key_derivation_method": "RAW", - "name": "test-wallet", - } - ).create_wallet() - with mock.patch.object(IndySdkProfile, "_make_finalizer"): - self.profile = IndySdkProfile(self.wallet, self.context) - self.issuer = test_module.IndySdkIssuer(self.profile) - - async def tearDown(self): - await self.profile.close() - - async def test_repr(self): - assert "IndySdkIssuer" in str(self.issuer) # cover __repr__ - - @mock.patch("indy.anoncreds.issuer_create_and_store_credential_def") - async def test_schema_cred_def(self, mock_indy_cred_def): - assert ( - self.issuer.make_schema_id(TEST_DID, SCHEMA_NAME, SCHEMA_VERSION) - == SCHEMA_ID - ) - - (s_id, schema_json) = await self.issuer.create_schema( - TEST_DID, - SCHEMA_NAME, - SCHEMA_VERSION, - ["name", "moniker", "genre", "effective"], - ) - assert s_id == SCHEMA_ID - schema = json.loads(schema_json) - schema["seqNo"] = SCHEMA_TXN - - assert ( - self.issuer.make_credential_definition_id(TEST_DID, schema, tag="default") - == CRED_DEF_ID - ) - - mock_indy_cred_def.return_value = ( - CRED_DEF_ID, - json.dumps({"dummy": "cred-def"}), - ) - assert (CRED_DEF_ID, json.dumps({"dummy": "cred-def"})) == ( - await self.issuer.create_and_store_credential_definition( - TEST_DID, schema, support_revocation=True - ) - ) - - @mock.patch("indy.anoncreds.issuer_create_credential_offer") - async def test_credential_definition_in_wallet(self, mock_indy_create_offer): - mock_indy_create_offer.return_value = {"sample": "offer"} - assert await self.issuer.credential_definition_in_wallet(CRED_DEF_ID) - - @mock.patch("indy.anoncreds.issuer_create_credential_offer") - async def test_credential_definition_in_wallet_no(self, mock_indy_create_offer): - mock_indy_create_offer.side_effect = WalletItemNotFound( - error_code=ErrorCode.WalletItemNotFound - ) - assert not await self.issuer.credential_definition_in_wallet(CRED_DEF_ID) - - @mock.patch("indy.anoncreds.issuer_create_credential_offer") - async def test_credential_definition_in_wallet_x(self, mock_indy_create_offer): - mock_indy_create_offer.side_effect = IndyError( - error_code=ErrorCode.WalletInvalidHandle - ) - with self.assertRaises(test_module.IndyIssuerError): - await self.issuer.credential_definition_in_wallet(CRED_DEF_ID) - - @mock.patch("indy.anoncreds.issuer_create_credential_offer") - async def test_create_credential_offer(self, mock_create_offer): - test_offer = {"test": "offer"} - test_cred_def_id = "test-cred-def-id" - mock_create_offer.return_value = json.dumps(test_offer) - mock_profile = mock.MagicMock() - issuer = test_module.IndySdkIssuer(mock_profile) - offer_json = await 
issuer.create_credential_offer(test_cred_def_id) - assert json.loads(offer_json) == test_offer - mock_create_offer.assert_called_once_with( - mock_profile.wallet.handle, test_cred_def_id - ) - - @mock.patch("indy.anoncreds.issuer_create_credential") - @mock.patch.object(test_module, "create_tails_reader", autospec=True) - @mock.patch("indy.anoncreds.issuer_revoke_credential") - @mock.patch("indy.anoncreds.issuer_merge_revocation_registry_deltas") - async def test_create_revoke_credentials( - self, - mock_indy_merge_rr_deltas, - mock_indy_revoke_credential, - mock_tails_reader, - mock_indy_create_credential, - ): - test_schema = {"attrNames": ["attr1"]} - test_offer = { - "schema_id": SCHEMA_ID, - "cred_def_id": CRED_DEF_ID, - "key_correctness_proof": {"c": "...", "xz_cap": "...", "xr_cap": ["..."]}, - "nonce": "...", - } - test_request = {"test": "request"} - test_values = {"attr1": "value1"} - test_cred = { - "schema_id": SCHEMA_ID, - "cred_def_id": CRED_DEF_ID, - "rev_reg_id": REV_REG_ID, - "values": {"attr1": {"raw": "value1", "encoded": "123456123899216581404"}}, - "signature": {"...": "..."}, - "signature_correctness_proof": {"...": "..."}, - "rev_reg": {"accum": "21 12E8..."}, - "witness": {"omega": "21 1369..."}, - } - test_cred_rev_ids = ["42", "54"] - test_rr_delta = TEST_RR_DELTA - mock_indy_create_credential.side_effect = [ - ( - json.dumps(test_cred), - cr_id, - test_rr_delta, - ) - for cr_id in test_cred_rev_ids - ] - - with self.assertRaises(test_module.IndyIssuerError): # missing attribute - cred_json, revoc_id = await self.issuer.create_credential( - test_schema, - test_offer, - test_request, - {}, - ) - - (cred_json, cred_rev_id) = await self.issuer.create_credential( # main line - test_schema, - test_offer, - test_request, - test_values, - REV_REG_ID, - "/tmp/tails/path/dummy", - ) - mock_indy_create_credential.assert_called_once() - ( - call_wallet, - call_offer, - call_request, - call_values, - call_etc1, - call_etc2, - ) = mock_indy_create_credential.call_args[0] - assert call_wallet is self.wallet.handle - assert json.loads(call_offer) == test_offer - assert json.loads(call_request) == test_request - values = json.loads(call_values) - assert "attr1" in values - - mock_indy_revoke_credential.return_value = json.dumps(TEST_RR_DELTA) - mock_indy_merge_rr_deltas.return_value = json.dumps(TEST_RR_DELTA) - (result, failed) = await self.issuer.revoke_credentials( - CRED_DEF_ID, - REV_REG_ID, - tails_file_path="dummy", - cred_rev_ids=test_cred_rev_ids, - ) - assert json.loads(result) == TEST_RR_DELTA - assert not failed - assert mock_indy_revoke_credential.call_count == 2 - mock_indy_merge_rr_deltas.assert_called_once() - - @mock.patch("indy.anoncreds.issuer_create_credential") - @mock.patch.object(test_module, "create_tails_reader", autospec=True) - @mock.patch("indy.anoncreds.issuer_revoke_credential") - @mock.patch("indy.anoncreds.issuer_merge_revocation_registry_deltas") - async def test_create_revoke_credentials_x( - self, - mock_indy_merge_rr_deltas, - mock_indy_revoke_credential, - mock_tails_reader, - mock_indy_create_credential, - ): - test_schema = {"attrNames": ["attr1"]} - test_offer = { - "schema_id": SCHEMA_ID, - "cred_def_id": CRED_DEF_ID, - "key_correctness_proof": {"c": "...", "xz_cap": "...", "xr_cap": ["..."]}, - "nonce": "...", - } - test_request = {"test": "request"} - test_values = {"attr1": "value1"} - test_cred = { - "schema_id": SCHEMA_ID, - "cred_def_id": CRED_DEF_ID, - "rev_reg_id": REV_REG_ID, - "values": {"attr1": {"raw": "value1", "encoded": 
"123456123899216581404"}}, - "signature": {"...": "..."}, - "signature_correctness_proof": {"...": "..."}, - "rev_reg": {"accum": "21 12E8..."}, - "witness": {"omega": "21 1369..."}, - } - test_cred_rev_ids = ["42", "54", "103"] - test_rr_delta = TEST_RR_DELTA - mock_indy_create_credential.side_effect = [ - ( - json.dumps(test_cred), - cr_id, - test_rr_delta, - ) - for cr_id in test_cred_rev_ids - ] - - with self.assertRaises(test_module.IndyIssuerError): # missing attribute - cred_json, revoc_id = await self.issuer.create_credential( - test_schema, - test_offer, - test_request, - {}, - ) - - (cred_json, cred_rev_id) = await self.issuer.create_credential( # main line - test_schema, - test_offer, - test_request, - test_values, - REV_REG_ID, - "/tmp/tails/path/dummy", - ) - mock_indy_create_credential.assert_called_once() - ( - call_wallet, - call_offer, - call_request, - call_values, - call_etc1, - call_etc2, - ) = mock_indy_create_credential.call_args[0] - assert call_wallet is self.wallet.handle - assert json.loads(call_offer) == test_offer - assert json.loads(call_request) == test_request - values = json.loads(call_values) - assert "attr1" in values - - def mock_revoke(_h, _t, _r, cred_rev_id): - if cred_rev_id == "42": - return json.dumps(TEST_RR_DELTA) - if cred_rev_id == "54": - raise IndyError( - error_code=ErrorCode.AnoncredsInvalidUserRevocId, - error_details={"message": "already revoked"}, - ) - raise IndyError( - error_code=ErrorCode.UnknownCryptoTypeError, - error_details={"message": "truly an outlier"}, - ) - - mock_indy_revoke_credential.side_effect = mock_revoke - mock_indy_merge_rr_deltas.return_value = json.dumps(TEST_RR_DELTA) - (result, failed) = await self.issuer.revoke_credentials( - CRED_DEF_ID, - REV_REG_ID, - tails_file_path="dummy", - cred_rev_ids=test_cred_rev_ids, - ) - assert json.loads(result) == TEST_RR_DELTA - assert failed == ["54", "103"] - assert mock_indy_revoke_credential.call_count == 3 - mock_indy_merge_rr_deltas.assert_not_called() - - @mock.patch("indy.anoncreds.issuer_create_credential") - @mock.patch.object(test_module, "create_tails_reader", autospec=True) - async def test_create_credential_rr_full( - self, - mock_tails_reader, - mock_indy_create_credential, - ): - test_schema = {"attrNames": ["attr1"]} - test_offer = { - "schema_id": SCHEMA_ID, - "cred_def_id": CRED_DEF_ID, - "key_correctness_proof": {"c": "...", "xz_cap": "...", "xr_cap": ["..."]}, - "nonce": "...", - } - test_request = {"test": "request"} - test_values = {"attr1": "value1"} - test_credential = {"test": "credential"} - test_cred_rev_id = "42" - test_rr_delta = TEST_RR_DELTA - mock_indy_create_credential.side_effect = AnoncredsRevocationRegistryFullError( - error_code=ErrorCode.AnoncredsRevocationRegistryFullError - ) - - with self.assertRaises(IndyIssuerRevocationRegistryFullError): - await self.issuer.create_credential( - test_schema, - test_offer, - test_request, - test_values, - ) - - @mock.patch("indy.anoncreds.issuer_create_credential") - @mock.patch.object(test_module, "create_tails_reader", autospec=True) - async def test_create_credential_x_indy( - self, - mock_tails_reader, - mock_indy_create_credential, - ): - test_schema = {"attrNames": ["attr1"]} - test_offer = { - "schema_id": SCHEMA_ID, - "cred_def_id": CRED_DEF_ID, - "key_correctness_proof": {"c": "...", "xz_cap": "...", "xr_cap": ["..."]}, - "nonce": "...", - } - test_request = {"test": "request"} - test_values = {"attr1": "value1"} - test_credential = {"test": "credential"} - test_cred_rev_id = "42" - 
test_rr_delta = TEST_RR_DELTA - - mock_indy_create_credential.side_effect = IndyError( - error_code=ErrorCode.WalletInvalidHandle - ) - - with self.assertRaises(test_module.IndyIssuerError): - await self.issuer.create_credential( - test_schema, - test_offer, - test_request, - test_values, - ) - - @mock.patch("indy.anoncreds.issuer_create_and_store_revoc_reg") - @mock.patch.object(test_module, "create_tails_writer", autospec=True) - async def test_create_and_store_revocation_registry( - self, mock_indy_tails_writer, mock_indy_rr - ): - mock_indy_rr.return_value = ("a", "b", "c") - ( - rr_id, - rrdef_json, - rre_json, - ) = await self.issuer.create_and_store_revocation_registry( - TEST_DID, CRED_DEF_ID, "CL_ACCUM", "rr-tag", 100, "/tmp/tails/path" - ) - assert (rr_id, rrdef_json, rre_json) == ("a", "b", "c") - - @mock.patch("indy.anoncreds.issuer_merge_revocation_registry_deltas") - async def test_merge_revocation_registry_deltas(self, mock_indy_merge): - mock_indy_merge.return_value = json.dumps({"net": "delta"}) - assert {"net": "delta"} == json.loads( - await self.issuer.merge_revocation_registry_deltas( - {"fro": "delta"}, {"to": "delta"} - ) - ) diff --git a/aries_cloudagent/indy/sdk/tests/test_profile.py b/aries_cloudagent/indy/sdk/tests/test_profile.py deleted file mode 100644 index 660641e4d7..0000000000 --- a/aries_cloudagent/indy/sdk/tests/test_profile.py +++ /dev/null @@ -1,130 +0,0 @@ -import pytest - -from aries_cloudagent.tests import mock - -from ....config.injection_context import InjectionContext -from ....core.error import ProfileError -from ....ledger.base import BaseLedger -from ....ledger.indy import IndySdkLedgerPool - -from ..profile import IndySdkProfile -from ..wallet_setup import IndyOpenWallet, IndyWalletConfig - -from .. import profile as test_module - - -@pytest.fixture -async def open_wallet(): - opened = IndyOpenWallet( - config=IndyWalletConfig({"name": "test-profile"}), - created=True, - handle=1, - master_secret_id="master-secret", - ) - with mock.patch.object(opened, "close", mock.CoroutineMock()): - yield opened - - -@pytest.fixture() -async def profile(open_wallet): - context = InjectionContext() - context.injector.bind_instance(IndySdkLedgerPool, IndySdkLedgerPool("name")) - profile = IndySdkProfile(open_wallet, context) - - yield profile - - # Trigger finalizer before event loop fixture is closed - profile._finalizer() - - -@pytest.mark.asyncio -async def test_init_multi_ledger(open_wallet): - context = InjectionContext( - settings={ - "ledger.ledger_config_list": [ - { - "id": "BCovrinDev", - "is_production": True, - "is_write": True, - "endorser_did": "9QPa6tHvBHttLg6U4xvviv", - "endorser_alias": "endorser_dev", - "genesis_transactions": mock.MagicMock(), - }, - { - "id": "SovrinStagingNet", - "is_production": False, - "genesis_transactions": mock.MagicMock(), - }, - ] - } - ) - askar_profile = IndySdkProfile( - open_wallet, - context=context, - ) - - assert askar_profile.opened == open_wallet - assert askar_profile.settings["endorser.endorser_alias"] == "endorser_dev" - assert ( - askar_profile.settings["endorser.endorser_public_did"] - == "9QPa6tHvBHttLg6U4xvviv" - ) - assert (askar_profile.inject_or(BaseLedger)).pool_name == "BCovrinDev" - - -@pytest.mark.asyncio -async def test_properties(profile: IndySdkProfile): - assert profile.name == "test-profile" - assert profile.backend == "indy" - assert profile.wallet and profile.wallet.handle == 1 - - assert "IndySdkProfile" in str(profile) - assert profile.created - assert profile.wallet.created - 
assert profile.wallet.master_secret_id == "master-secret" - - with mock.patch.object(profile, "opened", False): - with pytest.raises(ProfileError): - await profile.remove() - - with mock.patch.object(profile.opened, "close", mock.CoroutineMock()): - await profile.remove() - assert profile.opened is None - - -def test_settings_genesis_transactions(open_wallet): - context = InjectionContext( - settings={"ledger.genesis_transactions": mock.MagicMock()} - ) - context.injector.bind_instance(IndySdkLedgerPool, IndySdkLedgerPool("name")) - profile = IndySdkProfile(open_wallet, context) - - -def test_settings_ledger_config(open_wallet): - context = InjectionContext( - settings={ - "ledger.ledger_config_list": [ - mock.MagicMock(), - mock.MagicMock(), - ] - } - ) - context.injector.bind_instance(IndySdkLedgerPool, IndySdkLedgerPool("name")) - profile = IndySdkProfile(open_wallet, context) - - -def test_read_only(open_wallet): - context = InjectionContext(settings={"ledger.read_only": True}) - context.injector.bind_instance(IndySdkLedgerPool, IndySdkLedgerPool("name")) - ro_profile = IndySdkProfile(open_wallet, context) - - -def test_finalizer(open_wallet): - profile = IndySdkProfile(open_wallet) - assert profile - with mock.patch.object(test_module, "LOGGER", autospec=True) as mock_logger: - profile._finalizer() - assert mock_logger.debug.call_count == 1 - mock_logger.debug.assert_called_once_with( - "Profile finalizer called; closing wallet" - ) diff --git a/aries_cloudagent/indy/sdk/tests/test_util.py b/aries_cloudagent/indy/sdk/tests/test_util.py deleted file mode 100644 index fb587d8f40..0000000000 --- a/aries_cloudagent/indy/sdk/tests/test_util.py +++ /dev/null @@ -1,47 +0,0 @@ -import pytest - -from shutil import rmtree - -import indy.blob_storage - -from aries_cloudagent.tests import mock -from unittest import IsolatedAsyncioTestCase - -from ...util import indy_client_dir, generate_pr_nonce - -from ..util import create_tails_reader, create_tails_writer - - -@pytest.mark.indy -class TestIndyUtils(IsolatedAsyncioTestCase): - TAILS_HASH = "8UW1Sz5cqoUnK9hqQk7nvtKK65t7Chu3ui866J23sFyJ" - - def tearDown(self): - tails_dir = indy_client_dir("tails", create=False) - rmtree(tails_dir, ignore_errors=True) - - async def test_tails_reader(self): - tails_dir = indy_client_dir("tails", create=True) - tails_local = f"{tails_dir}/{TestIndyUtils.TAILS_HASH}" - - with open(tails_local, "a") as f: - print("1234123412431234", file=f) - - with mock.patch.object( - indy.blob_storage, "open_reader", mock.CoroutineMock() - ) as mock_blob_open_reader: - result = await create_tails_reader(tails_local) - assert result == mock_blob_open_reader.return_value - - rmtree(tails_dir, ignore_errors=True) - with self.assertRaises(FileNotFoundError): - await create_tails_reader(tails_local) - - async def test_tails_writer(self): - tails_dir = indy_client_dir("tails", create=True) - assert await create_tails_writer(tails_dir) - - rmtree(tails_dir, ignore_errors=True) - - async def test_nonce(self): - assert await generate_pr_nonce() diff --git a/aries_cloudagent/indy/sdk/tests/test_verifier.py b/aries_cloudagent/indy/sdk/tests/test_verifier.py deleted file mode 100644 index 17eed7044d..0000000000 --- a/aries_cloudagent/indy/sdk/tests/test_verifier.py +++ /dev/null @@ -1,597 +0,0 @@ -import json -import pytest - -from copy import deepcopy - -from aries_cloudagent.tests import mock -from unittest import IsolatedAsyncioTestCase -from indy.error import IndyError - -from ....core.in_memory import InMemoryProfile -from 
....ledger.multiple_ledger.ledger_requests_executor import ( - IndyLedgerRequestsExecutor, -) - -from ..verifier import IndySdkVerifier - - -INDY_PROOF_REQ_NAME = { - "nonce": "15606741555044336341559", - "name": "proof_req", - "version": "0.0", - "requested_attributes": { - "19_uuid": { - "name": "Preferred Name", - "restrictions": [{"cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag"}], - } - }, - "requested_predicates": {}, - "non_revoked": {"from": 1579892963, "to": 1579892963}, -} -INDY_PROOF_NAME = { - "proof": { - "proofs": [ - { - "primary_proof": { - "eq_proof": { - "revealed_attrs": { - "preferredname": "94607763023542937648705576709896212619553924110058781320304650334433495169960" - }, - "a_prime": "...", - "e": "...", - "v": "...", - "m": {"master_secret": "...", "musthave": "..."}, - "m2": "...", - }, - "ge_proofs": [], - }, - "non_revoc_proof": None, - } - ], - "aggregated_proof": {"c_hash": "...", "c_list": [[1, 152, 172, 159]]}, - }, - "requested_proof": { - "revealed_attrs": { - "19_uuid": { - "sub_proof_index": 0, - "raw": "Chicken Hawk", - "encoded": "94607763023542937648705576709896212619553924110058781320304650334433495169960", - } - }, - "self_attested_attrs": {}, - "unrevealed_attrs": {}, - "predicates": {}, - }, - "identifiers": [ - { - "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1579888926.0", - "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag", - "rev_reg_id": "LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag:CL_ACCUM:0", - "timestamp": 1579892963, - } - ], -} - -INDY_PROOF_REQ_PRED_NAMES = { - "nonce": "12301197819298309547817", - "name": "proof_req", - "version": "0.0", - "requested_attributes": { - "18_uuid": { - "names": [ - "effectiveDate", - "jurisdictionId", - "endDate", - "legalName", - "orgTypeId", - ], - "restrictions": [{"cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag"}], - "non_revoked": {"from": 1579892963, "to": 1579892963}, - } - }, - "requested_predicates": { - "18_id_GE_uuid": { - "name": "id", - "p_type": ">=", - "p_value": 4, - "restrictions": [{"cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag"}], - "non_revoked": {"from": 1579892963, "to": 1579892963}, - }, - "18_busid_GE_uuid": { - "name": "busId", - "p_type": ">=", - "p_value": 11198760, - "restrictions": [{"cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag"}], - "non_revoked": {"from": 1579892963, "to": 1579892963}, - }, - }, -} - -INDY_PROOF_PRED_NAMES = { - "proof": { - "proofs": [ - { - "primary_proof": { - "eq_proof": { - "revealed_attrs": { - "effectivedate": "29898645652691622994103043707423726726370719600737126045061047957925549204159", - "enddate": "102987336249554097029535212322581322789799900648198034993379397001115665086549", - "jurisdictionid": "1", - "legalname": "106000828387368179355563788886235175190145445419967766011746391966411797095112", - "orgtypeid": "2", - }, - "a_prime": "15004053730967415956444780426929245426212215338862984979494015601906315582840747306504594441147441231491693951307278868750626954115009843921712832446544313561614118651520859494434080523236571958503756731624044004577892061145780190353067193496632483652558392939182437813999901132281095751156851493821923092362225145694407463842363472935374563198989094026343235461171230866928987229440390088485243428084237480758852248249770191814659757536925909483623366615812343227994433513635227834136882757394235805747686707186194818800509367263735891033464810268941516104197138946893490285348940539273855011764023467736767154303840", - "e": 
"337235637061060569047727820825037317712308782222370290484075504679799877011498224496826887984625822621748120654975531604507028064312710", - "v": "1404574530639210172781710601270953478414552186112985513475784201805119858770941821816370201652610496512142438596496007803868074196519046400754620766301997215969127187833134416898687892635798644651536667962849945968608408680347359937747715599800353850666709655353571249823190377976481837350280859973291095846106817174217510384400072134061086282647508604512946800721425580606901739211324799734725830882957974114794011791236006123406974194631084620463349145771704097181067806553409326685136263002734388842031423620455266365851581883497063570426034222596154597920580362387253753317413050267993785894175555001456331223234400596625730555935567545822248399326429854362227165802672227905967242505077485029657064067770641969647406371744932313880132835781524174868335472062214928455106355639700336515509695339440337721239602256387991397127509846614577908429409389332146746885470613002111095896313068354016587589778644661193149185049", - "m": { - "master_secret": "268741899404098839327031223989228937242803085201179726908056281850709820406283972250249379228789368664433583241086441517910928033266925485611584652328638784395957058632060633630", - "busid": "2687197004064133543257369626470144380098036289489284489320086515026620206692616047425976133587124290887441908383692364439260071404270430528078491104384060203570606253676528361400", - "id": "6686713960986576137959547581149682718587656100042127047852747024276212127252400140409890726173570723819441146289878657604374865560165489933600436341950054222778208816245032311193", - }, - "m2": "1180732317592917288409508571561928260151012766032216949553655321777067495114084046539503538100319204468787213948625648828272873800122130063408401311370987", - }, - "ge_proofs": [ - { - "u": { - "0": "15775609194986735920510151800942995799222803216082415810148803816296803079801357496664353906579826995829149362968465835795491271435248746459334118965204125314594582971550392227954", - "2": "5303150256152520023495176881750201750170184894234097909710215547442554470805609846521764595898911334528530696240025838754931022084938196723161868181531727845300439592437899863887", - "3": "3356711078459696620189646681109895593397921422760359051406583045001333345458592898545852513866307624143916692556089833035405496562577023756005223378326300905996972689863856066875", - "1": "9999991890173781186974768504758157527548652482914116775165195164578745484991479122468109103928320060297494255214338396428491092606606051561499468708339979065194763516537003502062", - }, - "r": { - "DELTA": "1435090146724677611480872211988213747514582597135551797832629955760022689079479873681839403744643599039883834204615937515288097736927712499250203649611222666450687692819628191366070914555251320872315378202337414304735555708434851449005494065128333408295370378734399236857073675785782330461793283646613324794741612075132251003819809779185772616720952264615331933630593162857145006881047266047864525898689246790061753791575361871922643386721142202508891103428155725164118848489256450446385140752308548079412012057535799088600334139468432242004848224338715577616491890083382988533414746224157485737168172255373805848589505587117269746736884630803388115258573174965628402748672653106950672620945656786479587908733067646954", - "2": 
"1256008392268065460119207279471943768595068414860014149178278435399371400930962253759162888062020269227529923329167742931240473191566641468995735758696802138379814852469042293401437137956018945170092817785505583108837356735852654194156319248319729732466320634812831259869290012647072233952795236462156213752008019954481267917886292498492055846838619027167304191382208021540244250507570410888356448310603088364351895116324030484480320862223729665151544010778941061938440283026451178172286282191179070116776836046514823559685428369094109958537683453915206656040875788707049636999992215238431149080323327641760705766913474027428111260788981734182250598573031877786769378931547452684486997457718460021235476398326984192784", - "0": "1106819115015372998825208031668263762670044285179584053573615157030968633235403572251376628759852167093842839880087972608252236859761641404161906797947710722723839146345785722305071566665021165225324459369096631275993154775947819333058721055644040212303789659749985973470385218248603826878093609862767077416104661216163222886987607841769251824856950498177308519655255200045046063857789306026581362754683975573850037114774186895901788964351934533171525025276070855270632786783019588176816118329221122549398793872640055312900842112041891936494042853798319986365192512964078607266631918748545903216736690057842950922926661694759259530843862322858400156976838479950950178486526234308178957984785053903260967594398611911474", - "3": "1344309321242892215222847471501532826517184846819833677474602495849657271930678291855112591971466462816524573183554788643533109793416521709602069842696124889558288092186793062177006244758779556603409762571362221142413760629539533275654542467194539359678435299002566931998816165917234259226849828723125451685169672272552524344813036153633311318760938874320338280443847065712732394378892985736654998112090834297537844732478643713076998558297751199030671616253345870616092528684635775411928128373368327191277066131632614473951005152162823879892345970535505519113833062530738837915987508410926372810518540478552946670006272356196419957933718303344632112441115930033837912179851905872564389256853587645059720488720795906498", - "1": "601693817301763663113031272722721908754633550776510238958619960119672962844730314111437951375084589705366750107667669458320527173785853103929038523863706300574327283273485302578112396814149563941340189390051835244496748959403476105143355455812570759887645896592114448469303958006046663589996470308366068555479184906610439541373120510006128200782324694975090482529033281708168823833732457689747330091963586305323138559502300486975246644545238909598413498252470653544977963083975726047754021026165970401681664501179243533611966433308438886961268871140737772352678991735861225177227793364352974323624694500485545573621034350559474030565509027433457718466600471361048730018443642651540442802817989514889987171548775560085", - }, - "mj": "2687197004064133543257369626470144380098036289489284489320086515026620206692616047425976133587124290887441908383692364439260071404270430528078491104384060203570606253676528361400", - "alpha": 
"55264634475788812054149982413198771839810724235465324658821557285735947681415835295178267002738090787902834904063083682990582592095393028970773939852521059447360650213986737569312363077820486616943853008592650537183003498185887824618357246364458614494253289122927160626742649252943244636915456680482390825080294565093068093917997001255757200832353046300646785756652682640188703523223073037006585218839054980180609464837830370157522462983934135435603408143309318659202555550473599548567996557919032937165600303958449173855781262863161799425917680286809410314205550551542955745937751254083650878398344461109371177805333303453760504594222290495116260958547048583654306199387054245295488649024179114894686831993370968945510894767150406222332165620064150891563554498413420757277508788138394747656372783710437243804659113648361274361422790365575", - "t": { - "2": "1276353167840913477021397624773394332173592088650367702185572394040398533199538101776458275797662881371280361310311170677242402214354355702620614537036611922064060504606618126681639882263139365680565350790281701009940301284340534766480451762902788628875609130151618956111512660983755135355570760793108220842022869639781026918247205511538713530652099730605791686827103126406846076633375908411453922078354225032716111673736810973402770388177401531928271370790938081733309345905963052715943136682338494175330354955277424030755355371412956250746882945100461786601740318616758180741835591171045104436982446340050589105952", - "0": "52506109491039096251755479392960889070840776962363540274456217953760113102006029814040519995494713986268145627084927516727099691151450378385140332116480118436738261593744184296007314732823898043080011956933010369575980799348117283597824162615912372823633177749168952698401203464607973674241038357379577293158404669765882906589960120865518413803711729942613061301420107178603192154873722316947550106277771120767826035047479123749931790881679576800340417944013614994751361795012191068369383577242249201927422484806926120532089036692818076818060938822432774203557319821915034796962936855918437128832683302834778450852076", - "1": "113031374658594175812052384858113115052077482873081996361152721528334589441352531310470368095073157716273853401381658707580502108484382463859531044307244944300120928991532655473230562771713806228238940140492981669914382036157400059197253018428984542349187927786210979478008036674432605219414300881116700073904513558719492127462395417843765324361843076852973933175787635618464392198807598044268223652564648024618437362752148593227485835178720349721798423100634521510710239416375840314170338898512726956877281625226003452828033987655579773273571285524048285234475184043290899568731903112287738739915600509899766360789888", - "DELTA": "48234611140928682288937615809872962358698394776719271528059766394227502012090856649758227578113306604028516575292703546202775777621049060595611852517094547384541819122623967215175704296901562660240718967260151010609870475975072516070346770954330313963878747194405855012585768501635077016535846206257741952202337055842434195875166686634891739392750890861333875772954056854354284061793365725202163447856793288398072711194949704852318180275797984445814279136152858759907525062790250675184786142234427994860090376938644764493873560454829155051260697226196758890394411645758956396137763703934929680277278644873416013261035", - "3": 
"89994605437763910628730772379416923861874648327020237340785010128698483324987645925227420742287632948945347297152300219419713493590120999381541274609870183955909628256613851122899039933589797230083354701292272442523280565440597314568786864750837443422276701528731625877274094155541825495114060437788769205202442723879088098866185978654728309516302335284177924161235100925296934812127598754913984676011716654143603885735997160890946409226842054727795290304131313120189570773196857529159798597569189742987994905034522112705638567954037460125246215182613760404547369876267284411245030884496328403051974209422359756153509", - }, - "predicate": { - "attr_name": "busid", - "p_type": "GE", - "value": 11198760, - }, - }, - { - "u": { - "0": "13639548796026429922431564475630909149287414026758460721805236736313279517016438050089911517098811596997747189614439260518531845477684148307804856579405503329745365642794423965550", - "2": "12692415150154152887167590190910159618471206042982658652940787170770193806407265717354418163057121876574358366510055892372348735991661901637525227498965237677355250159501068181772", - "3": "6699159556719214469836363462599679663866420825429540116943002714507804742697411533141864346616123740789790632843719915716457061440487115732563925309886301301835201778554620543295", - "1": "2018654799729593932888298230804022878883145101317651811950082851492082577094184498971399238402895197739207931768086301073280634251050932415705600476284738694155135236800581664160", - }, - "r": { - "1": "825587756964975640126314737718300012891046538726331178577448524710910340957817679849290109848304786342311186386453239759474660538454793939540876256076287017677140704068118361949660090673111340635478762304690817532764517905140299716866605223450803768338360729151901747687349983483402342999368967231581939563361347289212973086454185400770130710116840233323953976914342262402301362679497329671787598650893202541829399630505463177921655009726556920408538662140155815031458475909120161960047235187953148398737965729023268444789967620657212914775071615366971436269789139928904779054710447116218434690464549160131819794059427689273427325814904354192089075836597740878803445045080385629565176143354201573860707045668850877586", - "2": "1466408189748340763973829793343949568330918709265623621614464341218317503955515434953266875378586538446326464353600075579788794127665478299651259465473747112701101990004860122720151191106445704432013015062973865716673386400413561687311954374930156679604666267815298214479078026652043482916898087471155683856282470644588563159648375551108786970597383143516158031628710096807215305878905007543811401502472821013567629888746492557864905681554913361277548019219082051265255078152509205293776781790132507115787621452248689332496610099725566623311760857590035073594921664074567131690599897210005475078142722295326868452002437292574903183037228401231409631285848202575278151773369676950274790626198680132560950102001994557758", - "0": 
"993248502537248262082444202395290853332499246354083708269674970707520839045168624341335318664418224639164402187209309139427257892643191846187663592057257899679944076599283980872521437340751206357777926871742796186382563827967273141200749480590415594087209691507734426984052841712131263160951495974745152392404724577427973267669378931113495076274617344076060846279028767371296979484895771867209047720463195305161885422275388748188299814182891315332800557749699941587327916028930365349641271736635219800975554147836564077611147631789530042925759823398087582121686407890628257624663383236878047170688254415445440912626941967028065807021170264150964938678824504194752040131898249057197187446968567390619785928296680096859", - "3": "353677912339120670248802964352055631737613331947764251954000578577314223482877266750851861467829550374246392637478716468616296688578414836737374015352059254057436572686513161681724599053168679581126352074962010335889993562619355121275432902043064229165956511160994192882167562213269670332262473472293819501037932879123080023576285854568501212240875918139761976977842939660466373041805369493971290555885442554468124891943099059169515428968196495673746803133324864149723509564523971808556630671471618581233229134929554792186889060256901637092067130348403992303346483664985586122149628146304160243882639275298266216270358565584574585823864941692911554602002331492551293859949912337984877479524597804956696499812250631744", - "DELTA": "725960022886687948013207416539699149371621853290822104811918058808196468403337509381122781137942343897440199450987104988666229964851227549448628470704889721866971126265999067769808855341632931627785927114398786533559660381398895352266657934136549351825103362166280268159652759301507640976500533521688660251972577237532256300306442315564311264115224457865178259661593100327194825492692234619818096596609477148829377559407992257373097100180145505767561403356284282388735420784241021016181364636135275395790815788682767997871662899508826815736302921531147145381730507095314577476550947092200539059112480501048978059997520366967856033897452966490827003353334313372398949710717623991939354590550708881302450618430658953556", - }, - "mj": "6686713960986576137959547581149682718587656100042127047852747024276212127252400140409890726173570723819441146289878657604374865560165489933600436341950054222778208816245032311193", - "alpha": "54312418620368017823413444392364697511954099195960921763340447211826963863335156427497199363801396644023918819073111783414569539781613083737557451917064836287105910850065875966424715696905777194811070071499968289670886194094119693198719726955718469770568556811780821894560508716495412081426259363632509089906620904790770113618876886851367553577555822830824722182497761114967889600421573310792308390968429341290356015872285765321156360499004114406293720515635636721256956836801168192621092752489119545742530767529595705696014856308531466145146269599634259697543058622520958051728230537251854292098956994695268415292349999637984082162556184322623578612708830627477718675001902228134597558345283147625462346943667586065769392740787755841399970302076200764539143397370091692013055692886714129148712005056929884477612627289722508451690081998890", - "t": { - "3": 
"76842650074027971332631982512373611181628371639695357946107030911055453488768447213460618917725534086368162318588252003797289252473279448248400376609193928062810022884201102892017821282461806593568305060473753735848560048445524907113838106958747793434918052694775405184619214354190540002998204225798499364075579094271521191419027986291013493577021803670203051346914082929873231509819450163988354047777312127725561922611471445963909565688013793926876707562644935391518355932605047591545917637465241017629839541260483606708345518662351776889719949822005165906622964213143757683950646046295114922019124075069329268061942", - "2": "104991045405164906316724339229643785709360971949973916361929774804163421784479300621496063132861029493850348596359070365652827572699577454378465299784873962729586537933990712981855548459986825452865420618489151243413027040820258308949176618728507177438646401022030966936494703173837070422031040550750643315987178063356959004909489540688791639398005266908038895531691252968451136025538449648159989963830846794193607106472742567850015960071634812903985081979755017126350806404047244177458032873066418448813920685609285163826032405474833353441325867090653794998832828049943461795570528006274431422907140560130037296666626", - "0": "93372553202246858510371387492221683266873274595585378473760313800346458391438909787465170333251843314544241604938410847073082151810448655558028921073676767770394665021417900636520680814177493616534162641758512946743051333557436759523671912141418810442158225543010238061117969558203880853763255647243160765086932831295304550412607848190595598510980669944139696363322475177492264636536910201776020324858798972778323663795303339939472573927415127166116444898790357846635883222746031584554927383535016321617425087697872601850303134185636960112124926520878185699975818343081756286170638877967660840814776000077787223928056", - "1": "88156501142569212422367853754801651086852287000049991938144173063936879655987557042149874374683234911554850776079721311154420204826376746982087019508277766132575858575556680538019849786965963718649479179859820106973881788608463705644074554956818391872137271784803047333543321479251515998725336896820211102747123583803854741248907240437683401575881169746704849524328003061107258995982062254548684849294595639491104266371155934951313704136302996039897528196270331875472554549327417349243990461246127383357748906616773662459665620147625796186736530089927957522542298814250937114283836911153790542409683746775259226224961", - "DELTA": "6178997688515360528852083990605883033892934661031543684879979804577432521872124008044788245406591933749401429548633356472853716766388636618335206416158216292785839570827245139150787585027801572977051847786797012358936548986405917266204321163760568135873831346087680040040301251630530064206552793273933549993844498438010903850013120770715837075880286264742500598494248510064600863411203212869270221192303957773402376357672080257393369434247825409313396018869267942811592657266119556377402842108726474978400793026037873416208879964428023321485607453655856252140587803891157033568210852205447175844430607889546700526279", - }, - "predicate": { - "attr_name": "id", - "p_type": "GE", - "value": 4, - }, - }, - ], - }, - "non_revoc_proof": { - "x_list": { - "rho": "12B28F49BF5F2CDA105B904CD594EB5B5F558025CDDB7D0F3057D19830F81694", - "r": "010F2B872DC4BECAE085D9FA1FB98184C3E00181A678F2B256140482B4DEDFCE", - "r_prime": "1AAF1AB0071B64FE22AC42219962B9ABA02147B09DFDC4C1FD088E6706D312FC", - "r_prime_prime": 
"1630D0800ADE33816BCA96EE89EC1E312BE80220510FAFAAC52BED567B187017", - "r_prime_prime_prime": "14D06B2F7B369880821DAAFD40D74FE3B495EE3A7CB7E937FDC4E9707330F301", - "o": "147F16718A0CCB2EC137ECA3952C801FB2281B06CB4ADC24092CE31CA2EAC5AD", - "o_prime": "14F4668810341353E0C25485C4F3CF459BCB69DD74FF73376A23ACAA550E09C5", - "m": "0EAC754EE9AC81C495AC3BB302511034965965AF337BC4F43850A1636E70314E", - "m_prime": "07CA764055E9920E7C4417C3A63BF610C145F25D618A41DAC785210D7320F0EF", - "t": "199D84A1133FB103E4E77CC6F917A12355AD4A29DCCC24999A2C67EBD62B5306", - "t_prime": "07154F39841E3D75E1E07179875B94D655E3BDD4A349E0BBAA43422CC527AACB", - "m2": "0D4380FF8ACDC21611BC8AB83127950441DA42A49A347BEC6F32044F033D3017", - "s": "0A65AE9D0C0D4CDAA5D4EECB48BC6DFD2279BD2C040AC0D9011918A9E0A7A866", - "c": "0ABFC02DDF76995C48CADEE8447665EB933446FEC42E7074FB11720E141CFC07", - }, - "c_list": { - "e": "6 418D8713ED93CD8C065EA42D110C581C2CE07A58771077B1C2016E53AA2E7461 4 2032A4917D0877B9723CDCD82B32AC96C534B0CAA5ED2EE3FFD605214511CB1F 4 0D8E5DA074651A0DE91F86F663F071EA4D4CD4CBA438F7A4D182C8D23D01B485", - "d": "6 37635F35298D224C0E3F01EB06DC8AC1D8A7E048027162077E0204801F22FF94 4 1E64440E13B08BD249B5C35B637C70BDA471926F5F3896400ED25EDA4678B73D 4 3A5BB704B473894CD54C91D1D159A7BD8FA8092545F93D1BC195D52D3EC96EDE", - "a": "6 6000DC780B9D7C71575A328DE2BACB78A2737E9C1CE64BC8BCE98BD8486EAAB4 4 39555F38DB15EC820DA3A7A61820F831A003D414D4A0EF60D1D37ABD8B5E1070 4 25FBA1AD320F02D9082118E978B4FE261280951BCE1FED15F65771AE04F8E270", - "g": "6 5D293948EF43989ACBB8262B8C7F10A551AD71190D70B9AAA62943C4FE6A4C42 4 2B3E1ED0A00163DCA9AD9B69DDA124290CF8F580F60595B5E9D506A3C0D9A903 4 29C2B6F7AD7F7B223FC40BD9C1147FCE831B26ACB477855C9D3EABD7B0341342", - "w": "21 1371C921AE2377A1CD9F0D3E863B09487B6DFC0DC5F2BA32133E4F5EF2ACA5641 21 10B84BA9167755980B1DCD97AB698D56E3C9CDCBE7A85F0776A2C96B3BE9519BE 6 6676ADACEC607381F87211DAE4DE6A630B74FAF580DBC383D8450C7852BC09C4 4 379C9A4FF46DEBF21223823D5B2323F7A56A394157E94DB95914A9E5BB27FAEC 6 7121D621C85D9BA22371A0862909FF25198F0EF690207AEE3910FB0E0A7A4F62 4 1C052A0276360F0D8AEBA71BD65ECB233FFDB700F031EA03146CF00BC2F2D5B6", - "s": "21 1272F477F5A0F83CCB316DA088F6A6A12C131D0DC9BC699023F534624B8EE255A 21 13816855011465BE2E8972F52EE4692873A763513A764BD92B8B7CBBBAA27D7E8 6 7B190F599B5F0EA53802135BBD655B080743FE60CC22329F69770D6B765F0AAA 4 2AAA191CA59348C6A920BD1D1AE37A7C96F424B6D8E921B54EA5C1C7C56297AA 6 80254CA5DFBAD3C39BC757534922FBD0846AB86500D5D168109EB6B8A9D2BE33 4 1CC93B3769A7BE2AF52CCE391D2BB57F9D907F530038EF84B3EC4AB54D62D872", - "u": "21 11E538813B74EFC8676EF5AC87AA05A0FF58913B7C68E264FCF5ED0D57F6BC781 21 12EE7BE65E15CF4C500E2D92DB02670FBD8B51C6BD0B35AE139E9CE9658B15CC2 6 856B3C0C152F75A449AD73DFAD7DFD87A99AAA606E3D8E392D765E3C987D7B47 4 34245F01BD7C4144DBEBE7AB35303BF02FB5717EC6B080BC9C2C930D929D4ED7 6 8113F127D8762B174DCB32AEE370297BF7CFCCF797510B53D53564AEC9105909 4 3B2434AD9AB4E7ABA7125800D14470A098AE04FA523CB60A6FFF62D371B95E13", - }, - }, - } - ], - "aggregated_proof": { - "c_hash": "37672016063516849654668323232510746418703126727195560560658262517075578769045", - "c_list": [ - [4, 0, 0, 0, 0, 0], - [4, 17, 153, 0, 0, 0, 0], - [4, 0, 0, 0, 0, 0, 0, 0], - [4, 1, 134, 126, 0, 0, 0, 0], - [10, 250, 248, 125, 158, 54, 165, 91, 59, 1], - [4, 167, 169, 22, 44], - [31, 254, 53], - [118, 218, 1, 27, 51, 96], - [1, 254, 120, 236], - [3, 127, 97, 134, 148, 32, 128], - [10, 124, 191, 32], - [32, 59, 96, 254, 165], - [195, 171, 64, 72, 40, 235], - [2, 175, 185, 172, 248], - [2, 152, 166, 185, 65], - [3, 63, 176, 24, 
2], - [2, 96, 182, 196, 220, 182, 246], - [48, 242, 116, 58, 18, 199], - ], - }, - }, - "requested_proof": { - "revealed_attrs": {}, - "revealed_attr_groups": { - "18_uuid": { - "sub_proof_index": 0, - "values": { - "effectiveDate": { - "raw": "2018-01-01", - "encoded": "29898645652691622994103043707423726726370719600737126045061047957925549204159", - }, - "endDate": { - "raw": "", - "encoded": "102987336249554097029535212322581322789799900648198034993379397001115665086549", - }, - "jurisdictionId": {"raw": "1", "encoded": "1"}, - "legalName": { - "raw": "Flan Nebula", - "encoded": "106000828387368179355563788886235175190145445419967766011746391966411797095112", - }, - "orgTypeId": {"raw": "2", "encoded": "2"}, - }, - } - }, - "self_attested_attrs": {}, - "unrevealed_attrs": {}, - "predicates": { - "18_busid_GE_uuid": {"sub_proof_index": 0}, - "18_id_GE_uuid": {"sub_proof_index": 0}, - }, - }, - "identifiers": [ - { - "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:bc-reg:1.0", - "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag", - "rev_reg_id": "LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag:CL_ACCUM:0", - "timestamp": 1579892963, - } - ], -} - -REV_REG_DEFS = { - "LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag:CL_ACCUM:0": { - "txnTime": 1500000000 - } -} - - -@pytest.mark.indy -class TestIndySdkVerifier(IsolatedAsyncioTestCase): - def setUp(self): - self.ledger = mock.MagicMock( - get_credential_definition=mock.CoroutineMock( - return_value={ - "...": "...", - "value": { - "revocation": { - "g": "1 ...", - "g_dash": "1 ...", - "h": "1 ...", - "h0": "1 ...", - "h1": "1 ...", - "h2": "1 ...", - "htilde": "1 ...", - "h_cap": "1 ...", - "u": "1 ...", - "pk": "1 ...", - "y": "1 ...", - } - }, - } - ) - ) - mock_profile = InMemoryProfile.test_profile() - context = mock_profile.context - context.injector.bind_instance( - IndyLedgerRequestsExecutor, IndyLedgerRequestsExecutor(mock_profile) - ) - self.verifier = IndySdkVerifier(mock_profile) - assert repr(self.verifier) == "<IndySdkVerifier>" - - @mock.patch("indy.anoncreds.verifier_verify_proof") - async def test_verify_presentation(self, mock_verify): - mock_verify.return_value = "val" - - with mock.patch.object( - self.verifier, "pre_verify", mock.CoroutineMock() - ) as mock_pre_verify, mock.patch.object( - self.verifier, "non_revoc_intervals", mock.MagicMock() - ) as mock_non_revox, mock.patch.object( - IndyLedgerRequestsExecutor, "get_ledger_for_identifier" - ) as mock_get_ledger: - mock_get_ledger.return_value = (None, self.ledger) - INDY_PROOF_REQ_X = deepcopy(INDY_PROOF_REQ_PRED_NAMES) - (verified, msgs) = await self.verifier.verify_presentation( - INDY_PROOF_REQ_X, - INDY_PROOF_PRED_NAMES, - "schemas", - {"LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag": {"value": {"revocation": {}}}}, - REV_REG_DEFS, - "rev_reg_entries", - ) - - mock_verify.assert_called_once_with( - json.dumps(INDY_PROOF_REQ_X), - json.dumps(INDY_PROOF_PRED_NAMES), - json.dumps("schemas"), - json.dumps( - {"LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag": {"value": {"revocation": {}}}} - ), - json.dumps(REV_REG_DEFS), - json.dumps("rev_reg_entries"), - ) - - assert verified == "val" - - @mock.patch("indy.anoncreds.verifier_verify_proof") - async def test_verify_presentation_x_indy(self, mock_verify): - mock_verify.side_effect = IndyError(error_code=1) - - with mock.patch.object( - self.verifier, "pre_verify", mock.CoroutineMock() - ) as mock_pre_verify, mock.patch.object( - self.verifier, "non_revoc_intervals", mock.MagicMock() - ) as mock_non_revox, mock.patch.object( -
IndyLedgerRequestsExecutor, "get_ledger_for_identifier" - ) as mock_get_ledger: - mock_get_ledger.return_value = ("test", self.ledger) - (verified, msgs) = await self.verifier.verify_presentation( - INDY_PROOF_REQ_NAME, - INDY_PROOF_NAME, - "schemas", - {"LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag": {"value": {}}}, - REV_REG_DEFS, - "rev_reg_entries", - ) - - mock_verify.assert_called_once_with( - json.dumps(INDY_PROOF_REQ_NAME), - json.dumps(INDY_PROOF_NAME), - json.dumps("schemas"), - json.dumps({"LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag": {"value": {}}}), - json.dumps(REV_REG_DEFS), - json.dumps("rev_reg_entries"), - ) - - assert not verified - - @mock.patch("indy.anoncreds.verifier_verify_proof") - async def test_check_encoding_attr(self, mock_verify): - with mock.patch.object( - IndyLedgerRequestsExecutor, "get_ledger_for_identifier" - ) as mock_get_ledger: - mock_get_ledger.return_value = (None, self.ledger) - mock_verify.return_value = True - (verified, msgs) = await self.verifier.verify_presentation( - INDY_PROOF_REQ_NAME, - INDY_PROOF_NAME, - "schemas", - {"LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag": {"value": {}}}, - REV_REG_DEFS, - "rev_reg_entries", - ) - - mock_verify.assert_called_once_with( - json.dumps(INDY_PROOF_REQ_NAME), - json.dumps(INDY_PROOF_NAME), - json.dumps("schemas"), - json.dumps({"LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag": {"value": {}}}), - json.dumps(REV_REG_DEFS), - json.dumps("rev_reg_entries"), - ) - assert verified is True - assert len(msgs) == 1 - assert "TS_OUT_NRI::19_uuid" in msgs - - @mock.patch("indy.anoncreds.verifier_verify_proof") - async def test_check_encoding_attr_tamper_raw(self, mock_verify): - INDY_PROOF_X = deepcopy(INDY_PROOF_NAME) - INDY_PROOF_X["requested_proof"]["revealed_attrs"]["19_uuid"][ - "raw" - ] = "Mock chicken" - with mock.patch.object( - IndyLedgerRequestsExecutor, "get_ledger_for_identifier" - ) as mock_get_ledger: - mock_get_ledger.return_value = ("test", self.ledger) - (verified, msgs) = await self.verifier.verify_presentation( - INDY_PROOF_REQ_NAME, - INDY_PROOF_X, - "schemas", - {"LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag": {"value": {}}}, - REV_REG_DEFS, - "rev_reg_entries", - ) - - mock_verify.assert_not_called() - - assert verified is False - assert len(msgs) == 2 - assert "TS_OUT_NRI::19_uuid" in msgs - assert ( - "VALUE_ERROR::Encoded representation mismatch for 'Preferred Name'" in msgs - ) - - @mock.patch("indy.anoncreds.verifier_verify_proof") - async def test_check_encoding_attr_tamper_encoded(self, mock_verify): - INDY_PROOF_X = deepcopy(INDY_PROOF_NAME) - INDY_PROOF_X["requested_proof"]["revealed_attrs"]["19_uuid"][ - "encoded" - ] = "1234567890" - with mock.patch.object( - IndyLedgerRequestsExecutor, "get_ledger_for_identifier" - ) as mock_get_ledger: - mock_get_ledger.return_value = (None, self.ledger) - (verified, msgs) = await self.verifier.verify_presentation( - INDY_PROOF_REQ_NAME, - INDY_PROOF_X, - "schemas", - {"LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag": {"value": {}}}, - REV_REG_DEFS, - "rev_reg_entries", - ) - - mock_verify.assert_not_called() - - assert verified is False - assert len(msgs) == 2 - assert "TS_OUT_NRI::19_uuid" in msgs - assert ( - "VALUE_ERROR::Encoded representation mismatch for 'Preferred Name'" in msgs - ) - - @mock.patch("indy.anoncreds.verifier_verify_proof") - async def test_check_pred_names(self, mock_verify): - with mock.patch.object( - IndyLedgerRequestsExecutor, "get_ledger_for_identifier" - ) as mock_get_ledger: - mock_get_ledger.return_value = ("test", self.ledger) - mock_verify.return_value = True - 
INDY_PROOF_REQ_X = deepcopy(INDY_PROOF_REQ_PRED_NAMES) - (verified, msgs) = await self.verifier.verify_presentation( - INDY_PROOF_REQ_X, - INDY_PROOF_PRED_NAMES, - "schemas", - {"LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag": {"value": {"revocation": {}}}}, - REV_REG_DEFS, - "rev_reg_entries", - ) - - mock_verify.assert_called_once_with( - json.dumps(INDY_PROOF_REQ_X), - json.dumps(INDY_PROOF_PRED_NAMES), - json.dumps("schemas"), - json.dumps( - {"LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag": {"value": {"revocation": {}}}} - ), - json.dumps(REV_REG_DEFS), - json.dumps("rev_reg_entries"), - ) - - assert verified is True - assert len(msgs) == 3 - assert "TS_OUT_NRI::18_uuid" in msgs - assert "TS_OUT_NRI::18_id_GE_uuid" in msgs - assert "TS_OUT_NRI::18_busid_GE_uuid" in msgs - - @mock.patch("indy.anoncreds.verifier_verify_proof") - async def test_check_pred_names_tamper_pred_value(self, mock_verify): - INDY_PROOF_X = deepcopy(INDY_PROOF_PRED_NAMES) - INDY_PROOF_X["proof"]["proofs"][0]["primary_proof"]["ge_proofs"][0][ - "predicate" - ]["value"] = 0 - with mock.patch.object( - IndyLedgerRequestsExecutor, "get_ledger_for_identifier" - ) as mock_get_ledger: - mock_get_ledger.return_value = (None, self.ledger) - (verified, msgs) = await self.verifier.verify_presentation( - deepcopy(INDY_PROOF_REQ_PRED_NAMES), - INDY_PROOF_X, - "schemas", - {"LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag": {"value": {}}}, - REV_REG_DEFS, - "rev_reg_entries", - ) - - mock_verify.assert_not_called() - - assert verified is False - assert len(msgs) == 4 - assert "RMV_RFNT_NRI::18_uuid" in msgs - assert "RMV_RFNT_NRI::18_busid_GE_uuid" in msgs - assert "RMV_RFNT_NRI::18_id_GE_uuid" in msgs - assert ( - "VALUE_ERROR::Timestamp on sub-proof #0 is superfluous vs. requested attribute group 18_uuid" - in msgs - ) - - @mock.patch("indy.anoncreds.verifier_verify_proof") - async def test_check_pred_names_tamper_pred_req_attr(self, mock_verify): - INDY_PROOF_REQ_X = deepcopy(INDY_PROOF_REQ_PRED_NAMES) - INDY_PROOF_REQ_X["requested_predicates"]["18_busid_GE_uuid"]["name"] = "dummy" - with mock.patch.object( - IndyLedgerRequestsExecutor, "get_ledger_for_identifier" - ) as mock_get_ledger: - mock_get_ledger.return_value = (None, self.ledger) - (verified, msgs) = await self.verifier.verify_presentation( - INDY_PROOF_REQ_X, - INDY_PROOF_PRED_NAMES, - "schemas", - {"LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag": {"value": {}}}, - REV_REG_DEFS, - "rev_reg_entries", - ) - - mock_verify.assert_not_called() - - assert verified is False - assert len(msgs) == 4 - assert "RMV_RFNT_NRI::18_uuid" in msgs - assert "RMV_RFNT_NRI::18_busid_GE_uuid" in msgs - assert "RMV_RFNT_NRI::18_id_GE_uuid" in msgs - assert ( - "VALUE_ERROR::Timestamp on sub-proof #0 is superfluous vs. 
requested attribute group 18_uuid" - in msgs - ) - - @mock.patch("indy.anoncreds.verifier_verify_proof") - async def test_check_pred_names_tamper_attr_groups(self, mock_verify): - INDY_PROOF_X = deepcopy(INDY_PROOF_PRED_NAMES) - INDY_PROOF_X["requested_proof"]["revealed_attr_groups"]["x_uuid"] = ( - INDY_PROOF_X["requested_proof"]["revealed_attr_groups"].pop("18_uuid") - ) - with mock.patch.object( - IndyLedgerRequestsExecutor, "get_ledger_for_identifier" - ) as mock_get_ledger: - mock_get_ledger.return_value = ("test", self.ledger) - (verified, msgs) = await self.verifier.verify_presentation( - deepcopy(INDY_PROOF_REQ_PRED_NAMES), - INDY_PROOF_X, - "schemas", - {"LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag": {"value": {}}}, - REV_REG_DEFS, - "rev_reg_entries", - ) - - mock_verify.assert_not_called() - - assert verified is False - assert len(msgs) == 3 - assert "RMV_RFNT_NRI::18_busid_GE_uuid" in msgs - assert "RMV_RFNT_NRI::18_id_GE_uuid" in msgs - assert "VALUE_ERROR::Missing requested attribute group 18_uuid" in msgs diff --git a/aries_cloudagent/indy/sdk/tests/test_wallet_plugin.py b/aries_cloudagent/indy/sdk/tests/test_wallet_plugin.py deleted file mode 100644 index 1ca4f752f5..0000000000 --- a/aries_cloudagent/indy/sdk/tests/test_wallet_plugin.py +++ /dev/null @@ -1,125 +0,0 @@ -from unittest import mock -from unittest import IsolatedAsyncioTestCase - - -from .. import wallet_plugin as test_module - - -class TestWalletCrypto(IsolatedAsyncioTestCase): - def setUp(self): - test_module.LOADED = False - - async def test_file_ext(self): - assert test_module.file_ext() - - def test_load_postgres_plugin(self): - storage_config = '{"wallet_scheme":"MultiWalletSingleTable"}' - storage_creds = '{"account":"test"}' - mock_stg_lib = mock.MagicMock( - postgresstorage_init=mock.MagicMock(return_value=0), - init_storagetype=mock.MagicMock(return_value=0), - ) - with mock.patch.object( - test_module.cdll, "LoadLibrary", mock.Mock() - ) as mock_load: - mock_load.return_value = mock_stg_lib - test_module.load_postgres_plugin(storage_config, storage_creds) - - assert test_module.LOADED - - def test_load_postgres_plugin_init_x_raise(self): - storage_config = '{"wallet_scheme":"MultiWalletSingleTable"}' - storage_creds = '{"account":"test"}' - mock_stg_lib = mock.MagicMock( - postgresstorage_init=mock.MagicMock(return_value=2) - ) - with mock.patch.object( - test_module.cdll, "LoadLibrary", mock.Mock() - ) as mock_load: - mock_load.return_value = mock_stg_lib - with self.assertRaises(OSError) as context: - test_module.load_postgres_plugin( - storage_config, storage_creds, raise_exc=True - ) - assert "unable to load postgres" in str(context.exception) - - def test_load_postgres_plugin_init_x_exit(self): - storage_config = '{"wallet_scheme":"MultiWalletSingleTable"}' - storage_creds = '{"account":"test"}' - mock_stg_lib = mock.MagicMock( - postgresstorage_init=mock.MagicMock(return_value=2) - ) - with mock.patch.object( - test_module.cdll, "LoadLibrary", mock.Mock() - ) as mock_load: - mock_load.return_value = mock_stg_lib - with self.assertRaises(SystemExit): - test_module.load_postgres_plugin( - storage_config, storage_creds, raise_exc=False - ) - - def test_load_postgres_plugin_config_x_raise(self): - storage_config = '{"wallet_scheme":"MultiWalletSingleTable"}' - storage_creds = '{"account":"test"}' - mock_stg_lib = mock.MagicMock( - postgresstorage_init=mock.MagicMock(return_value=0), - init_storagetype=mock.MagicMock(return_value=2), - ) - with mock.patch.object( - test_module.cdll, "LoadLibrary", mock.Mock() 
- ) as mock_load: - mock_load.return_value = mock_stg_lib - with self.assertRaises(OSError) as context: - test_module.load_postgres_plugin( - storage_config, storage_creds, raise_exc=True - ) - assert "unable to configure postgres" in str(context.exception) - - def test_load_postgres_plugin_config_x_exit(self): - storage_config = '{"wallet_scheme":"MultiWalletSingleTable"}' - storage_creds = '{"account":"test"}' - mock_stg_lib = mock.MagicMock( - postgresstorage_init=mock.MagicMock(return_value=0), - init_storagetype=mock.MagicMock(return_value=2), - ) - with mock.patch.object( - test_module.cdll, "LoadLibrary", mock.Mock() - ) as mock_load: - mock_load.return_value = mock_stg_lib - with self.assertRaises(SystemExit): - test_module.load_postgres_plugin( - storage_config, storage_creds, raise_exc=False - ) - - def test_load_postgres_plugin_bad_json_x_raise(self): - storage_config = '{"wallet_scheme":"MultiWalletSingleTable"}' - storage_creds = '"account":"test"' - mock_stg_lib = mock.MagicMock( - postgresstorage_init=mock.MagicMock(return_value=0), - init_storagetype=mock.MagicMock(return_value=2), - ) - with mock.patch.object( - test_module.cdll, "LoadLibrary", mock.Mock() - ) as mock_load: - mock_load.return_value = mock_stg_lib - with self.assertRaises(OSError) as context: - test_module.load_postgres_plugin( - storage_config, storage_creds, raise_exc=True - ) - assert "Invalid stringified JSON input" in str(context.exception) - - def test_load_postgres_plugin_bad_json_x_exit(self): - storage_config = '"wallet_scheme":"MultiWalletSingleTable"' - storage_creds = '{"account":"test"}' - mock_stg_lib = mock.MagicMock( - postgresstorage_init=mock.MagicMock(return_value=0), - init_storagetype=mock.MagicMock(return_value=2), - ) - with mock.patch.object( - test_module.cdll, "LoadLibrary", mock.Mock() - ) as mock_load: - mock_load.return_value = mock_stg_lib - with self.assertRaises(SystemExit): - test_module.load_postgres_plugin( - storage_config, storage_creds, raise_exc=False - ) diff --git a/aries_cloudagent/indy/sdk/util.py b/aries_cloudagent/indy/sdk/util.py deleted file mode 100644 index 549d06e965..0000000000 --- a/aries_cloudagent/indy/sdk/util.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Indy utilities.""" - -import json - -from pathlib import Path - -import indy.blob_storage - - -async def create_tails_reader(tails_file_path: str) -> int: - """Get a handle for the blob_storage file reader.""" - tails_file_path = Path(tails_file_path) - - if not tails_file_path.exists(): - raise FileNotFoundError("Tails file does not exist.") - - tails_reader_config = json.dumps( - { - "base_dir": str(tails_file_path.parent.absolute()), - "file": str(tails_file_path.name), - } - ) - return await indy.blob_storage.open_reader("default", tails_reader_config) - - -async def create_tails_writer(tails_base_dir: str) -> int: - """Get a handle for the blob_storage file writer.""" - tails_writer_config = json.dumps({"base_dir": tails_base_dir, "uri_pattern": ""}) - return await indy.blob_storage.open_writer("default", tails_writer_config) diff --git a/aries_cloudagent/indy/sdk/verifier.py b/aries_cloudagent/indy/sdk/verifier.py deleted file mode 100644 index 81bdc5a601..0000000000 --- a/aries_cloudagent/indy/sdk/verifier.py +++ /dev/null @@ -1,88 +0,0 @@ -"""Indy SDK verifier implementation.""" - -import json -import logging - -from typing import Tuple - -import indy.anoncreds -from indy.error import IndyError - -from ...core.profile import Profile - -from ..verifier import IndyVerifier, PresVerifyMsg - -LOGGER = 
logging.getLogger(__name__) - - -class IndySdkVerifier(IndyVerifier): - """Indy-SDK verifier implementation.""" - - def __init__(self, profile: Profile): - """Initialize an IndyVerifier instance. - - Args: - profile: Active Profile instance - - """ - self.profile = profile - - async def verify_presentation( - self, - pres_req, - pres, - schemas, - credential_definitions, - rev_reg_defs, - rev_reg_entries, - ) -> Tuple[bool, list]: - """Verify a presentation. - - Args: - pres_req: Presentation request data - pres: Presentation data - schemas: Schema data - credential_definitions: credential definition data - rev_reg_defs: revocation registry definitions - rev_reg_entries: revocation registry entries - """ - - LOGGER.debug(f">>> received presentation: {pres}") - LOGGER.debug(f">>> for pres_req: {pres_req}") - msgs = [] - try: - msgs += self.non_revoc_intervals(pres_req, pres, credential_definitions) - msgs += await self.check_timestamps( - self.profile, pres_req, pres, rev_reg_defs - ) - msgs += await self.pre_verify(pres_req, pres) - except ValueError as err: - s = str(err) - msgs.append(f"{PresVerifyMsg.PRES_VALUE_ERROR.value}::{s}") - LOGGER.error( - f"Presentation on nonce={pres_req['nonce']} " - f"cannot be validated: {str(err)}" - ) - return (False, msgs) - - LOGGER.debug(f">>> verifying presentation: {pres}") - LOGGER.debug(f">>> for pres_req: {pres_req}") - try: - verified = await indy.anoncreds.verifier_verify_proof( - json.dumps(pres_req), - json.dumps(pres), - json.dumps(schemas), - json.dumps(credential_definitions), - json.dumps(rev_reg_defs), - json.dumps(rev_reg_entries), - ) - except IndyError as err: - s = str(err) - msgs.append(f"{PresVerifyMsg.PRES_VERIFY_ERROR.value}::{s}") - LOGGER.exception( - f"Validation of presentation on nonce={pres_req['nonce']} " - "failed with error" - ) - verified = False - - return (verified, msgs) diff --git a/aries_cloudagent/indy/sdk/wallet_plugin.py b/aries_cloudagent/indy/sdk/wallet_plugin.py deleted file mode 100644 index 8dbdb24775..0000000000 --- a/aries_cloudagent/indy/sdk/wallet_plugin.py +++ /dev/null @@ -1,63 +0,0 @@ -"""Utility for loading Postgres wallet plug-in.""" - -import logging -import platform -import json -from ctypes import cdll, c_char_p - -EXTENSION = {"darwin": ".dylib", "linux": ".so", "win32": ".dll", "windows": ".dll"} -LOADED = False -LOGGER = logging.getLogger(__name__) - - -def file_ext(): - """Determine file extension based on platform.""" - your_platform = platform.system().lower() - return EXTENSION[your_platform] if (your_platform in EXTENSION) else ".so" - - -def load_postgres_plugin(storage_config, storage_creds, raise_exc=False): - """Load postgres dll and configure postgres wallet.""" - global LOADED, LOGGER - - if not LOADED: - LOGGER.info( - "Checking input postgres storage_config and storage_creds arguments" - ) - try: - json.loads(storage_config) - json.loads(storage_creds) - except json.decoder.JSONDecodeError: - LOGGER.error( - "Invalid stringified JSON input, check storage_config and storage_creds" - ) - if raise_exc: - raise OSError( - "Invalid stringified JSON input, " - "check storage_config and storage_creds" - ) - else: - raise SystemExit(1) - - LOGGER.info("Initializing postgres wallet") - stg_lib = cdll.LoadLibrary("libindystrgpostgres" + file_ext()) - result = stg_lib.postgresstorage_init() - if result != 0: - LOGGER.error("Error unable to load postgres wallet storage: %s", result) - if raise_exc: - raise OSError(f"Error unable to load postgres wallet storage: {result}") - else: - raise 
SystemExit(1) - if "wallet_scheme" in storage_config: - c_config = c_char_p(storage_config.encode("utf-8")) - c_credentials = c_char_p(storage_creds.encode("utf-8")) - result = stg_lib.init_storagetype(c_config, c_credentials) - if result != 0: - LOGGER.error("Error unable to configure postgres stg: %s", result) - if raise_exc: - raise OSError(f"Error unable to configure postgres stg: {result}") - else: - raise SystemExit(1) - LOADED = True - - LOGGER.info("Success, loaded postgres wallet storage") diff --git a/aries_cloudagent/indy/sdk/wallet_setup.py b/aries_cloudagent/indy/sdk/wallet_setup.py deleted file mode 100644 index 59e8dc3921..0000000000 --- a/aries_cloudagent/indy/sdk/wallet_setup.py +++ /dev/null @@ -1,233 +0,0 @@ -"""Indy-SDK wallet setup and configuration.""" - -import json -import logging - -from typing import Any, Mapping - -import indy.anoncreds -import indy.did -import indy.crypto -import indy.wallet - -from indy.error import IndyError, ErrorCode - -from ...core.error import ProfileError, ProfileDuplicateError, ProfileNotFoundError -from ...core.profile import Profile - -from .error import IndyErrorHandler -from .wallet_plugin import load_postgres_plugin - -LOGGER = logging.getLogger(__name__) - - -class IndyWalletConfig: - """A helper class for handling Indy-SDK wallet configuration.""" - - DEFAULT_FRESHNESS = False - DEFAULT_KEY = "" - DEFAULT_KEY_DERIVATION = "ARGON2I_MOD" - DEFAULT_STORAGE_TYPE = None - - KEY_DERIVATION_RAW = "RAW" - KEY_DERIVATION_ARGON2I_INT = "ARGON2I_INT" - KEY_DERIVATION_ARGON2I_MOD = "ARGON2I_MOD" - - def __init__(self, config: Mapping[str, Any] = None): - """Initialize an `IndyWalletConfig` instance. - - Args: - config: {name, key, seed, did, auto_recreate, auto_remove, - storage_type, storage_config, storage_creds} - - """ - - config = config or {} - self.auto_recreate = config.get("auto_recreate", False) - self.auto_remove = config.get("auto_remove", False) - self.freshness_time = config.get("freshness_time", self.DEFAULT_FRESHNESS) - self.key = config.get("key", self.DEFAULT_KEY) - self.key_derivation_method = ( - config.get("key_derivation_method") or self.DEFAULT_KEY_DERIVATION - ) - # self.rekey = config.get("rekey") - # self.rekey_derivation_method = config.get("rekey_derivation_method") - self.name = config.get("name") or Profile.DEFAULT_NAME - self.storage_type = config.get("storage_type") or self.DEFAULT_STORAGE_TYPE - self.storage_config = config.get("storage_config", None) - self.storage_creds = config.get("storage_creds", None) - - if self.storage_type == "postgres_storage": - load_postgres_plugin(self.storage_config, self.storage_creds) - - @property - def wallet_config(self) -> dict: - """Accessor for the Indy wallet config.""" - ret = { - "id": self.name, - "freshness_time": self.freshness_time, - "storage_type": self.storage_type, - } - if self.storage_config is not None: - ret["storage_config"] = json.loads(self.storage_config) - return ret - - @property - def wallet_access(self) -> dict: - """Accessor for the Indy wallet access info.""" - ret = {"key": self.key, "key_derivation_method": self.key_derivation_method} - # if self.rekey: - # ret["rekey"] = self.rekey - # if self.rekey_derivation_method: - # ret["rekey_derivation_method"] = self.rekey_derivation_method - if self.storage_creds is not None: - ret["storage_credentials"] = json.loads(self.storage_creds) - return ret - - async def create_wallet(self) -> "IndyOpenWallet": - """Create a new wallet. 
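To make the derivation above concrete, a hypothetical config and the two dicts IndyWalletConfig produces from it (values are illustrative only; never hard-code a real wallet key):

cfg = IndyWalletConfig({
    "name": "agent-wallet",
    "key": "insecure-example-key",                 # illustrative only
    "storage_config": '{"path": "/tmp/wallets"}',  # hypothetical config JSON
})
assert cfg.wallet_config == {
    "id": "agent-wallet",
    "freshness_time": False,  # DEFAULT_FRESHNESS
    "storage_type": None,     # DEFAULT_STORAGE_TYPE
    "storage_config": {"path": "/tmp/wallets"},
}
assert cfg.wallet_access == {
    "key": "insecure-example-key",
    "key_derivation_method": "ARGON2I_MOD",  # DEFAULT_KEY_DERIVATION
}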
- - Raises: - ProfileDuplicateError: If there was an existing wallet with the same name - ProfileError: If there was a problem removing the wallet - ProfileError: If there was another libindy error - - """ - if self.auto_recreate: - try: - await self.remove_wallet() - except ProfileNotFoundError: - pass - try: - await indy.wallet.create_wallet( - config=json.dumps(self.wallet_config), - credentials=json.dumps(self.wallet_access), - ) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.WalletAlreadyExistsError: - raise IndyErrorHandler.wrap_error( - x_indy, - f"Cannot create wallet '{self.name}', already exists", - ProfileDuplicateError, - ) from x_indy - raise IndyErrorHandler.wrap_error( - x_indy, - f"Error creating wallet '{self.name}'", - ProfileError, - ) from x_indy - - try: - return await self.open_wallet(created=True) - except ProfileNotFoundError as err: - raise ProfileError( - f"Wallet '{self.name}' not found after creation" - ) from err - - async def remove_wallet(self): - """Remove an existing wallet. - - Raises: - ProfileNotFoundError: If the wallet could not be found - ProfileError: If there was another libindy error - - """ - try: - await indy.wallet.delete_wallet( - config=json.dumps(self.wallet_config), - credentials=json.dumps(self.wallet_access), - ) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.WalletNotFoundError: - raise IndyErrorHandler.wrap_error( - x_indy, - f"Wallet '{self.name}' not found", - ProfileNotFoundError, - ) from x_indy - raise IndyErrorHandler.wrap_error( - x_indy, f"Error removing wallet '{self.name}'", ProfileError - ) from x_indy - - async def open_wallet(self, created: bool = False) -> "IndyOpenWallet": - """Open wallet, removing and/or creating it if so configured. - - Raises: - ProfileError: If wallet not found after creation - ProfileNotFoundError: If the wallet is not found - ProfileError: If the wallet is already open - ProfileError: If there is another libindy error - - """ - handle = None - - while True: - try: - handle = await indy.wallet.open_wallet( - config=json.dumps(self.wallet_config), - credentials=json.dumps(self.wallet_access), - ) - # if self.rekey: - # self.key = self.rekey - # self.rekey = None - # if self.rekey_derivation_method: - # self.key_derivation_method = self.rekey_derivation_method - # self.rekey_derivation_method = None - break - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.WalletNotFoundError: - raise IndyErrorHandler.wrap_error( - x_indy, f"Wallet '{self.name}' not found", ProfileNotFoundError - ) from x_indy - elif x_indy.error_code == ErrorCode.WalletAlreadyOpenedError: - raise IndyErrorHandler.wrap_error( - x_indy, f"Wallet '{self.name}' is already open", ProfileError - ) from x_indy - else: - raise IndyErrorHandler.wrap_error( - x_indy, f"Error opening wallet '{self.name}'", ProfileError - ) from x_indy - - LOGGER.info("Creating master secret...") - try: - master_secret_id = await indy.anoncreds.prover_create_master_secret( - handle, self.name - ) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.AnoncredsMasterSecretDuplicateNameError: - LOGGER.info("Master secret already exists") - master_secret_id = self.name - else: - raise IndyErrorHandler.wrap_error( - x_indy, f"Wallet '{self.name}' error", ProfileError - ) from x_indy - - return IndyOpenWallet(self, created, handle, master_secret_id) - - -class IndyOpenWallet: - """Handle and metadata for an opened Indy wallet.""" - - def __init__( - self, - config: IndyWalletConfig, - created, - handle, 
- master_secret_id: str, - ): - """Create a new IndyOpenWallet instance.""" - self.config = config - self.created = created - self.handle = handle - self.master_secret_id = master_secret_id - - @property - def name(self) -> str: - """Accessor for the opened wallet name.""" - return self.config.name - - async def close(self): - """Close previously-opened wallet, removing it if so configured.""" - if self.handle: - await indy.wallet.close_wallet(self.handle) - self.handle = None - if self.config.auto_remove: - await self.config.remove_wallet() diff --git a/aries_cloudagent/indy/tests/test_verifier.py b/aries_cloudagent/indy/tests/test_verifier.py index 54c836d42a..a9bfef2734 100644 --- a/aries_cloudagent/indy/tests/test_verifier.py +++ b/aries_cloudagent/indy/tests/test_verifier.py @@ -1,5 +1,3 @@ -import pytest - from copy import deepcopy from time import time @@ -306,7 +304,6 @@ async def verify_presentation( raise NotImplementedError() -@pytest.mark.indy class TestIndySdkVerifier(IsolatedAsyncioTestCase): def setUp(self): self.ledger = mock.MagicMock( diff --git a/aries_cloudagent/ledger/base.py b/aries_cloudagent/ledger/base.py index 1ebc8926c5..509dc48d4b 100644 --- a/aries_cloudagent/ledger/base.py +++ b/aries_cloudagent/ledger/base.py @@ -643,7 +643,7 @@ async def send_schema_anoncreds( try: legacy_indy_registry = LegacyIndyRegistry() resp = await legacy_indy_registry.txn_submit( - self.profile, + self, schema_req, sign=True, sign_did=public_info, diff --git a/aries_cloudagent/ledger/indy.py b/aries_cloudagent/ledger/indy.py deleted file mode 100644 index 9dc18cbc7a..0000000000 --- a/aries_cloudagent/ledger/indy.py +++ /dev/null @@ -1,1293 +0,0 @@ -"""Indy ledger implementation.""" - -import asyncio -import json -import logging -import tempfile -from datetime import date, datetime -from io import StringIO -from os import path -from time import time -from typing import TYPE_CHECKING, List, Optional, Tuple - -import indy.ledger -import indy.pool -from indy.error import ErrorCode, IndyError - -from ..cache.base import BaseCache -from ..config.base import BaseInjector, BaseProvider, BaseSettings -from ..indy.sdk.error import IndyErrorHandler -from ..storage.base import StorageRecord -from ..storage.indy import IndySdkStorage -from ..utils import sentinel -from ..wallet.base import BaseWallet -from ..wallet.did_info import DIDInfo -from ..wallet.did_posture import DIDPosture -from ..wallet.error import WalletNotFoundError -from ..wallet.util import full_verkey -from .base import BaseLedger, Role -from .endpoint_type import EndpointType -from .error import ( - BadLedgerRequestError, - ClosedPoolError, - LedgerConfigError, - LedgerError, - LedgerTransactionError, -) -from .util import TAA_ACCEPTED_RECORD_TYPE - -if TYPE_CHECKING: - from ..indy.sdk.profile import IndySdkProfile - -LOGGER = logging.getLogger(__name__) - -GENESIS_TRANSACTION_FILE = "indy_genesis_transactions.txt" - - -def _normalize_txns(txns: str) -> str: - """Normalize a set of genesis transactions.""" - lines = StringIO() - for line in txns.splitlines(): - line = line.strip() - if line: - lines.write(line) - lines.write("\n") - return lines.getvalue() - - -class IndySdkLedgerPoolProvider(BaseProvider): - """Indy ledger pool provider which keys off the selected pool name.""" - - def provide(self, settings: BaseSettings, injector: BaseInjector): - """Create and open the pool instance.""" - - pool_name = settings.get("ledger.pool_name", "default") - keepalive = int(settings.get("ledger.keepalive", 5)) - read_only = 
bool(settings.get("ledger.read_only", False)) - socks_proxy = settings.get("ledger.socks_proxy") - - if read_only: - LOGGER.warning("Note: setting ledger to read-only mode") - - genesis_transactions = settings.get("ledger.genesis_transactions") - cache = injector.inject_or(BaseCache) - - ledger_pool = IndySdkLedgerPool( - pool_name, - keepalive=keepalive, - cache=cache, - genesis_transactions=genesis_transactions, - read_only=read_only, - socks_proxy=socks_proxy, - ) - - return ledger_pool - - -class IndySdkLedgerPool: - """Indy ledger manager class.""" - - def __init__( - self, - name: str, - *, - checked: bool = False, - keepalive: int = 0, - cache: BaseCache = None, - cache_duration: int = 600, - genesis_transactions: str = None, - read_only: bool = False, - socks_proxy: str = None, - ): - """Initialize an IndySdkLedgerPool instance. - - Args: - name: The Indy pool ledger configuration name - keepalive: How many seconds to keep the ledger open - cache: The cache instance to use - cache_duration: The TTL for ledger cache entries - genesis_transactions: The ledger genesis transaction as a string - read_only: Prevent any ledger write operations - socks_proxy: Specifies socks proxy for ZMQ to connect to ledger pool - """ - self.checked = checked - self.opened = False - self.ref_count = 0 - self.ref_lock = asyncio.Lock() - self.keepalive = keepalive - self.close_task: asyncio.Future = None - self.cache = cache - self.cache_duration = cache_duration - self.genesis_transactions = genesis_transactions - self.genesis_txns_cache = genesis_transactions - self.handle = None - self.name = name - self.taa_cache = None - self.read_only = read_only - self.socks_proxy = socks_proxy - - @property - def genesis_txns(self) -> str: - """Get the configured genesis transactions.""" - if not self.genesis_txns_cache: - try: - txn_path = path.join( - tempfile.gettempdir(), f"{self.name}_{GENESIS_TRANSACTION_FILE}" - ) - self.genesis_txns_cache = _normalize_txns(open(txn_path).read()) - except FileNotFoundError: - raise LedgerConfigError( - "Pool config '%s' not found", self.name - ) from None - return self.genesis_txns_cache - - async def create_pool_config( - self, genesis_transactions: str, recreate: bool = False - ): - """Create the pool ledger configuration.""" - - # indy-sdk requires a file to pass the pool configuration - # the file path includes the pool name to avoid conflicts - txn_path = path.join( - tempfile.gettempdir(), f"{self.name}_{GENESIS_TRANSACTION_FILE}" - ) - with open(txn_path, "w") as genesis_file: - genesis_file.write(genesis_transactions) - pool_config = json.dumps({"genesis_txn": txn_path}) - - if await self.check_pool_config(): - if recreate: - LOGGER.debug("Removing existing ledger config") - await indy.pool.delete_pool_ledger_config(self.name) - else: - raise LedgerConfigError( - "Ledger pool configuration already exists: %s", self.name - ) - - LOGGER.debug("Creating pool ledger config") - with IndyErrorHandler( - "Exception creating pool ledger config", LedgerConfigError - ): - await indy.pool.create_pool_ledger_config(self.name, pool_config) - - async def check_pool_config(self) -> bool: - """Check if a pool config has been created.""" - pool_names = {cfg["pool"] for cfg in await indy.pool.list_pools()} - return self.name in pool_names - - async def open(self): - """Open the pool ledger, creating it if necessary.""" - - if self.genesis_transactions: - await self.create_pool_config(self.genesis_transactions, True) - self.genesis_transactions = None - self.checked = True - elif not 
self.checked: - if not await self.check_pool_config(): - raise LedgerError("Ledger pool configuration has not been created") - self.checked = True - - # We only support proto ver 2 - with IndyErrorHandler( - "Exception setting ledger protocol version", LedgerConfigError - ): - await indy.pool.set_protocol_version(2) - - with IndyErrorHandler( - f"Exception opening pool ledger {self.name}", LedgerConfigError - ): - pool_config = json.dumps({}) - if self.socks_proxy is not None: - pool_config = json.dumps({"socks_proxy": self.socks_proxy}) - LOGGER.debug("Open pool with config: %s", pool_config) - self.handle = await indy.pool.open_pool_ledger(self.name, pool_config) - self.opened = True - - async def close(self): - """Close the pool ledger.""" - if self.opened: - exc = None - for _attempt in range(3): - try: - await indy.pool.close_pool_ledger(self.handle) - except IndyError as err: - await asyncio.sleep(0.01) - exc = err - continue - - self.handle = None - self.opened = False - exc = None - break - - if exc: - LOGGER.error("Exception closing pool ledger") - self.ref_count += 1 # if we are here, we should have self.ref_lock - self.close_task = None - raise IndyErrorHandler.wrap_error( - exc, "Exception closing pool ledger", LedgerError - ) - - async def context_open(self): - """Open the ledger if necessary and increase the number of active references.""" - async with self.ref_lock: - if self.close_task: - self.close_task.cancel() - if not self.opened: - LOGGER.debug("Opening the pool ledger") - await self.open() - self.ref_count += 1 - - async def context_close(self): - """Release the reference and schedule closing of the pool ledger.""" - - async def closer(timeout: int): - """Close the pool ledger after a timeout.""" - await asyncio.sleep(timeout) - async with self.ref_lock: - if not self.ref_count: - LOGGER.debug("Closing pool ledger after timeout") - await self.close() - - async with self.ref_lock: - self.ref_count -= 1 - if not self.ref_count: - if self.keepalive: - self.close_task = asyncio.ensure_future(closer(self.keepalive)) - else: - await self.close() - - -class IndySdkLedger(BaseLedger): - """Indy ledger class.""" - - BACKEND_NAME = "indy" - - def __init__( - self, - pool: IndySdkLedgerPool, - profile: "IndySdkProfile", - ): - """Initialize an IndySdkLedger instance. - - Args: - pool: The pool instance handling the raw ledger connection - profile: The IndySdkProfile instance - """ - self.pool = pool - self.profile = profile - - @property - def pool_handle(self): - """Accessor for the ledger pool handle.""" - return self.pool.handle - - @property - def pool_name(self) -> str: - """Accessor for the ledger pool name.""" - return self.pool.name - - @property - def read_only(self) -> bool: - """Accessor for the ledger read-only flag.""" - return self.pool.read_only - - async def is_ledger_read_only(self) -> bool: - """Check if ledger is read-only including TAA.""" - if self.read_only: - return self.read_only - # if TAA is required and not accepted we should be in read-only mode - taa = await self.get_txn_author_agreement() - if taa["taa_required"]: - taa_acceptance = await self.get_latest_txn_author_acceptance() - if "mechanism" not in taa_acceptance: - return True - return self.read_only - - async def __aenter__(self) -> "IndySdkLedger": - """Context manager entry. 
- - Returns: - The current instance - - """ - await super().__aenter__() - await self.pool.context_open() - return self - - async def __aexit__(self, exc_type, exc, tb): - """Context manager exit.""" - await self.pool.context_close() - await super().__aexit__(exc_type, exc, tb) - - async def get_wallet_public_did(self) -> DIDInfo: - """Fetch the public DID from the wallet.""" - async with self.profile.session() as session: - wallet = session.inject(BaseWallet) - return await wallet.get_public_did() - - async def _endorse( - self, - request_json: str, - endorse_did: DIDInfo = None, - ) -> str: - if not self.pool.handle: - raise ClosedPoolError( - f"Cannot endorse request with closed pool '{self.pool.name}'" - ) - - public_info = endorse_did if endorse_did else await self.get_wallet_public_did() - if not public_info: - raise BadLedgerRequestError( - "Cannot endorse transaction without a public DID" - ) - endorsed_request_json = await indy.ledger.multi_sign_request( - self.profile.wallet.handle, public_info.did, request_json - ) - return endorsed_request_json - - async def _submit( - self, - request_json: str, - sign: bool = None, - taa_accept: bool = None, - sign_did: DIDInfo = sentinel, - write_ledger: bool = True, - ) -> str: - """Sign and submit request to ledger. - - Args: - request_json: The json string to submit - sign: whether or not to sign the request - taa_accept: whether to apply TAA acceptance to the (signed, write) request - sign_did: override the signing DID - write_ledger: skip the request submission - - """ - - if not self.pool.handle: - raise ClosedPoolError( - f"Cannot sign and submit request to closed pool '{self.pool.name}'" - ) - - if sign is None or sign: - if sign_did is sentinel: - sign_did = await self.get_wallet_public_did() - if sign is None: - sign = bool(sign_did) - - if taa_accept is None and sign: - taa_accept = True - - if sign: - if not sign_did: - raise BadLedgerRequestError("Cannot sign request without a public DID") - if taa_accept: - acceptance = await self.get_latest_txn_author_acceptance() - if acceptance: - # flake8 and black 23.1.0 check collision fix - # fmt: off - request_json = await ( - indy.ledger.append_txn_author_agreement_acceptance_to_request( - request_json, - acceptance["text"], - acceptance["version"], - acceptance["digest"], - acceptance["mechanism"], - acceptance["time"], - ) - ) - # fmt: on - if write_ledger: - submit_op = indy.ledger.sign_and_submit_request( - self.pool.handle, - self.profile.wallet.handle, - sign_did.did, - request_json, - ) - else: - # multi-sign, since we expect this to get endorsed later - submit_op = indy.ledger.multi_sign_request( - self.profile.wallet.handle, sign_did.did, request_json - ) - else: - submit_op = indy.ledger.submit_request(self.pool.handle, request_json) - - with IndyErrorHandler( - "Exception raised by ledger transaction", LedgerTransactionError - ): - request_result_json = await submit_op - - if sign and not write_ledger: - return request_result_json - - request_result = json.loads(request_result_json) - - operation = request_result.get("op", "") - - if operation in ("REQNACK", "REJECT"): - raise LedgerTransactionError( - f"Ledger rejected transaction request: {request_result['reason']}" - ) - - elif operation == "REPLY": - return request_result_json - - else: - raise LedgerTransactionError( - f"Unexpected operation code from ledger: {operation}" - ) - - async def txn_endorse( - self, - request_json: str, - endorse_did: DIDInfo = None, - ) -> str: - """Endorse a (signed) ledger transaction.""" - 
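For reference, the response envelopes that _submit() above distinguishes follow the usual indy-node shapes; in sketch form (field values hypothetical):

reply = {"op": "REPLY", "result": {"txnMetadata": {"seqNo": 42}}}  # accepted
reqnack = {"op": "REQNACK", "reason": "client request invalid"}    # failed static validation
reject = {"op": "REJECT", "reason": "UnauthorizedClientRequest"}   # failed dynamic validation

Both refusal codes surface as LedgerTransactionError carrying the ledger-supplied reason; any other op code is treated as unexpected.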
return await self._endorse(request_json, endorse_did=endorse_did) - - async def txn_submit( - self, - request_json: str, - sign: bool = None, - taa_accept: bool = None, - sign_did: DIDInfo = sentinel, - write_ledger: bool = True, - ) -> str: - """Submit a signed (and endorsed) transaction to the ledger.""" - return await self._submit( - request_json, - sign=sign, - taa_accept=taa_accept, - sign_did=sign_did, - write_ledger=write_ledger, - ) - - async def _create_schema_request( - self, - public_info: DIDInfo, - schema_json: str, - write_ledger: bool = True, - endorser_did: str = None, - ): - """Create the ledger request for publishing a schema.""" - with IndyErrorHandler("Exception building schema request", LedgerError): - request_json = await indy.ledger.build_schema_request( - public_info.did, schema_json - ) - - if endorser_did and not write_ledger: - request_json = await indy.ledger.append_request_endorser( - request_json, endorser_did - ) - - return request_json - - async def _create_revoc_reg_def_request( - self, - public_info: DIDInfo, - revoc_reg_def_json: str, - write_ledger: bool = True, - endorser_did: str = None, - ): - """Create the ledger request for publishing a revocation registry definition.""" - with IndyErrorHandler("Exception building revoc reg def request", LedgerError): - request_json = await indy.ledger.build_revoc_reg_def_request( - public_info.did, revoc_reg_def_json - ) - - if endorser_did and not write_ledger: - request_json = await indy.ledger.append_request_endorser( - request_json, endorser_did - ) - - return request_json - - async def get_schema(self, schema_id: str) -> dict: - """Get a schema from the cache if available, otherwise fetch from the ledger. - - Args: - schema_id: The schema id (or stringified sequence number) to retrieve - - """ - if self.pool.cache: - result = await self.pool.cache.get(f"schema::{schema_id}") - if result: - return result - - if schema_id.isdigit(): - return await self.fetch_schema_by_seq_no(int(schema_id)) - else: - return await self.fetch_schema_by_id(schema_id) - - async def fetch_schema_by_id(self, schema_id: str) -> dict: - """Get schema from ledger. - - Args: - schema_id: The schema id (or stringified sequence number) to retrieve - - Returns: - Indy schema dict - - """ - - public_info = await self.get_wallet_public_did() - public_did = public_info.did if public_info else None - - with IndyErrorHandler("Exception building schema request", LedgerError): - request_json = await indy.ledger.build_get_schema_request( - public_did, schema_id - ) - - response_json = await self._submit(request_json, sign_did=public_info) - response = json.loads(response_json) - if not response["result"]["seqNo"]: - # schema not found - return None - - with IndyErrorHandler("Exception parsing schema response", LedgerError): - _, parsed_schema_json = await indy.ledger.parse_get_schema_response( - response_json - ) - - parsed_response = json.loads(parsed_schema_json) - if parsed_response and self.pool.cache: - await self.pool.cache.set( - [f"schema::{schema_id}", f"schema::{response['result']['seqNo']}"], - parsed_response, - self.pool.cache_duration, - ) - - return parsed_response - - async def fetch_schema_by_seq_no(self, seq_no: int) -> dict: - """Fetch a schema by its sequence number. 
- - Args: - seq_no: schema ledger sequence number - - Returns: - Indy schema dict - - """ - # get txn by sequence number, retrieve schema identifier components - request_json = await indy.ledger.build_get_txn_request( - None, None, seq_no=seq_no - ) - response = json.loads(await self._submit(request_json)) - - # transaction data format assumes node protocol >= 1.4 (circa 2018-07) - data_txn = (response["result"].get("data", {}) or {}).get("txn", {}) - if data_txn.get("type", None) == "101": # marks indy-sdk schema txn type - (origin_did, name, version) = ( - data_txn["metadata"]["from"], - data_txn["data"]["data"]["name"], - data_txn["data"]["data"]["version"], - ) - schema_id = f"{origin_did}:2:{name}:{version}" - return await self.get_schema(schema_id) - - raise LedgerTransactionError( - f"Could not get schema from ledger for seq no {seq_no}" - ) - - async def _create_credential_definition_request( - self, - public_info: DIDInfo, - credential_definition_json: str, - write_ledger: bool = True, - endorser_did: str = None, - ): - """Create the ledger request for publishing a credential definition.""" - with IndyErrorHandler("Exception building cred def request", LedgerError): - request_json = await indy.ledger.build_cred_def_request( - public_info.did, credential_definition_json - ) - - if endorser_did and not write_ledger: - request_json = await indy.ledger.append_request_endorser( - request_json, endorser_did - ) - - return request_json - - async def get_credential_definition(self, credential_definition_id: str) -> dict: - """Get a credential definition from the cache if available, otherwise the ledger. - - Args: - credential_definition_id: The id of the credential definition to fetch - - """ - if self.pool.cache: - result = await self.pool.cache.get( - f"credential_definition::{credential_definition_id}" - ) - if result: - return result - - return await self.fetch_credential_definition(credential_definition_id) - - async def fetch_credential_definition(self, credential_definition_id: str) -> dict: - """Get a credential definition from the ledger by id. - - Args: - credential_definition_id: The cred def id of the cred def to fetch - - """ - - public_info = await self.get_wallet_public_did() - public_did = public_info.did if public_info else None - - with IndyErrorHandler("Exception building cred def request", LedgerError): - request_json = await indy.ledger.build_get_cred_def_request( - public_did, credential_definition_id - ) - - response_json = await self._submit(request_json, sign_did=public_info) - - with IndyErrorHandler("Exception parsing cred def response", LedgerError): - try: - ( - _, - parsed_credential_definition_json, - ) = await indy.ledger.parse_get_cred_def_response(response_json) - parsed_response = json.loads(parsed_credential_definition_json) - except IndyError as error: - if error.error_code == ErrorCode.LedgerNotFound: - parsed_response = None - else: - raise - - if parsed_response and self.pool.cache: - await self.pool.cache.set( - f"credential_definition::{credential_definition_id}", - parsed_response, - self.pool.cache_duration, - ) - - return parsed_response - - async def credential_definition_id2schema_id(self, credential_definition_id): - """From a credential definition, get the identifier for its schema. 
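The token arithmetic in the method that follows leans on two well-known Indy identifier layouts (the example name and version are hypothetical; the DID comes from the fixtures above):

schema_id = "LjgpST2rjsoxYegQDRm7EL:2:preferences:1.0"    # <did>:2:<name>:<version>
cred_def_id_short = "LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag"  # 5 tokens; token 3 is the schema seq no
cred_def_id_long = (
    "LjgpST2rjsoxYegQDRm7EL:3:CL:LjgpST2rjsoxYegQDRm7EL:2:preferences:1.0:tag"
)  # 8 tokens; embeds the full schema id

tokens = cred_def_id_long.split(":")
assert ":".join(tokens[3:7]) == schema_id       # 0-based positions 3-6
assert cred_def_id_short.split(":")[3] == "19"  # short form requires a ledger lookup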
- - Args: - credential_definition_id: The identifier of the credential definition - from which to identify a schema - """ - - # scrape schema id or sequence number from cred def id - tokens = credential_definition_id.split(":") - if len(tokens) == 8: # node protocol >= 1.4: cred def id has 5 or 8 tokens - return ":".join(tokens[3:7]) # schema id spans 0-based positions 3-6 - - # get txn by sequence number, retrieve schema identifier components - seq_no = tokens[3] - return (await self.get_schema(seq_no))["id"] - - async def get_key_for_did(self, did: str) -> str: - """Fetch the verkey for a ledger DID. - - Args: - did: The DID to look up on the ledger or in the cache - """ - nym = self.did_to_nym(did) - public_info = await self.get_wallet_public_did() - public_did = public_info.did if public_info else None - with IndyErrorHandler("Exception building nym request", LedgerError): - request_json = await indy.ledger.build_get_nym_request(public_did, nym) - response_json = await self._submit(request_json, sign_did=public_info) - data_json = (json.loads(response_json))["result"]["data"] - return full_verkey(did, json.loads(data_json)["verkey"]) if data_json else None - - async def get_all_endpoints_for_did(self, did: str) -> dict: - """Fetch all endpoints for a ledger DID. - - Args: - did: The DID to look up on the ledger or in the cache - """ - nym = self.did_to_nym(did) - public_info = await self.get_wallet_public_did() - public_did = public_info.did if public_info else None - with IndyErrorHandler("Exception building attribute request", LedgerError): - request_json = await indy.ledger.build_get_attrib_request( - public_did, nym, "endpoint", None, None - ) - response_json = await self._submit(request_json, sign_did=public_info) - data_json = json.loads(response_json)["result"]["data"] - - if data_json: - endpoints = json.loads(data_json).get("endpoint", None) - else: - endpoints = None - - return endpoints - - async def get_endpoint_for_did( - self, did: str, endpoint_type: EndpointType = None - ) -> str: - """Fetch the endpoint for a ledger DID. - - Args: - did: The DID to look up on the ledger or in the cache - endpoint_type: The type of the endpoint. If none given, returns all - """ - - if not endpoint_type: - endpoint_type = EndpointType.ENDPOINT - nym = self.did_to_nym(did) - public_info = await self.get_wallet_public_did() - public_did = public_info.did if public_info else None - with IndyErrorHandler("Exception building attribute request", LedgerError): - request_json = await indy.ledger.build_get_attrib_request( - public_did, nym, "endpoint", None, None - ) - response_json = await self._submit(request_json, sign_did=public_info) - data_json = json.loads(response_json)["result"]["data"] - if data_json: - endpoint = json.loads(data_json).get("endpoint", None) - address = endpoint.get(endpoint_type.indy, None) if endpoint else None - else: - address = None - - return address - - async def update_endpoint_for_did( - self, - did: str, - endpoint: str, - endpoint_type: EndpointType = None, - write_ledger: bool = True, - endorser_did: str = None, - routing_keys: List[str] = None, - ) -> bool: - """Check and update the endpoint on the ledger. 
- - Args: - did: The ledger DID - endpoint: The endpoint address - endpoint_type: The type of the endpoint - """ - public_info = await self.get_wallet_public_did() - if not public_info: - raise BadLedgerRequestError( - "Cannot update endpoint at ledger without a public DID" - ) - - if not endpoint_type: - endpoint_type = EndpointType.ENDPOINT - - all_exist_endpoints = await self.get_all_endpoints_for_did(did) - exist_endpoint_of_type = ( - all_exist_endpoints.get(endpoint_type.indy, None) - if all_exist_endpoints - else None - ) - - if exist_endpoint_of_type != endpoint: - if await self.is_ledger_read_only(): - raise LedgerError( - "Error cannot update endpoint when ledger is in read only mode, " - "or TAA is required and not accepted" - ) - - nym = self.did_to_nym(did) - - attr_json = await self._construct_attr_json( - endpoint, endpoint_type, all_exist_endpoints, routing_keys - ) - - with IndyErrorHandler("Exception building attribute request", LedgerError): - request_json = await indy.ledger.build_attrib_request( - nym, nym, None, attr_json, None - ) - - if endorser_did and not write_ledger: - request_json = await indy.ledger.append_request_endorser( - request_json, endorser_did - ) - resp = await self._submit( - request_json, - sign=True, - sign_did=public_info, - write_ledger=write_ledger, - ) - if not write_ledger: - return {"signed_txn": resp} - - await self._submit(request_json, True, True) - return True - - return False - - async def register_nym( - self, - did: str, - verkey: str, - alias: str = None, - role: str = None, - write_ledger: bool = True, - endorser_did: str = None, - ) -> Tuple[bool, dict]: - """Register a nym on the ledger. - - Args: - did: DID to register on the ledger. - verkey: The verification key of the keypair. - alias: Human-friendly alias to assign to the DID. - role: For permissioned ledgers, what role should the new DID have. - """ - if await self.is_ledger_read_only(): - raise LedgerError( - "Error cannot register nym when ledger is in read only mode, " - "or TAA is required and not accepted" - ) - - public_info = await self.get_wallet_public_did() - if not public_info: - raise WalletNotFoundError( - f"Cannot register NYM to ledger: wallet {self.profile.name} " - "has no public DID" - ) - with IndyErrorHandler("Exception building nym request", LedgerError): - request_json = await indy.ledger.build_nym_request( - public_info.did, did, verkey, alias, role - ) - if endorser_did and not write_ledger: - request_json = await indy.ledger.append_request_endorser( - request_json, endorser_did - ) - resp = await self._submit( - request_json, sign=True, sign_did=public_info, write_ledger=write_ledger - ) # let ledger raise on insufficient privilege - if not write_ledger: - return True, {"signed_txn": resp} - async with self.profile.session() as session: - wallet = session.inject(BaseWallet) - try: - did_info = await wallet.get_local_did(did) - except WalletNotFoundError: - pass # registering another user's NYM - else: - metadata = {**did_info.metadata, **DIDPosture.POSTED.metadata} - await wallet.replace_local_did_metadata(did, metadata) - return True, None - - async def get_nym_role(self, did: str) -> Role: - """Return the role of the input public DID's NYM on the ledger. - - Args: - did: DID to query for role on the ledger. 
- """ - public_info = await self.get_wallet_public_did() - public_did = public_info.did if public_info else None - - with IndyErrorHandler("Exception building get-nym request", LedgerError): - request_json = await indy.ledger.build_get_nym_request(public_did, did) - - response_json = await self._submit(request_json) - response = json.loads(response_json) - nym_data = json.loads(response["result"]["data"]) - if not nym_data: - raise BadLedgerRequestError(f"DID {did} is not public") - - return Role.get(nym_data["role"]) - - def nym_to_did(self, nym: str) -> str: - """Format a nym with the ledger's DID prefix.""" - if nym: - # remove any existing prefix - nym = self.did_to_nym(nym) - return f"did:sov:{nym}" - - async def build_and_return_get_nym_request( - self, submitter_did: Optional[str], target_did: str - ) -> str: - """Build GET_NYM request and return request_json.""" - with IndyErrorHandler("Exception building nym request", LedgerError): - request_json = await indy.ledger.build_get_nym_request( - submitter_did, target_did - ) - return request_json - - async def submit_get_nym_request(self, request_json: str) -> str: - """Submit GET_NYM request to ledger and return response_json.""" - response_json = await self._submit(request_json) - return response_json - - async def rotate_public_did_keypair(self, next_seed: str = None) -> None: - """Rotate keypair for public DID: create new key, submit to ledger, update wallet. - - Args: - next_seed: seed for incoming ed25519 keypair (default random) - """ - # generate new key - public_info = await self.get_wallet_public_did() - public_did = public_info.did - async with self.profile.session() as session: - wallet = session.inject(BaseWallet) - verkey = await wallet.rotate_did_keypair_start(public_did, next_seed) - - # submit to ledger (retain role and alias) - nym = self.did_to_nym(public_did) - with IndyErrorHandler("Exception building nym request", LedgerError): - request_json = await indy.ledger.build_get_nym_request(public_did, nym) - - response_json = await self._submit(request_json) - data = json.loads((json.loads(response_json))["result"]["data"]) - if not data: - raise BadLedgerRequestError( - f"Ledger has no public DID for wallet {self.profile.name}" - ) - seq_no = data["seqNo"] - - with IndyErrorHandler("Exception building get-txn request", LedgerError): - txn_req_json = await indy.ledger.build_get_txn_request(None, None, seq_no) - - txn_resp_json = await self._submit(txn_req_json) - txn_resp = json.loads(txn_resp_json) - txn_resp_data = txn_resp["result"]["data"] - if not txn_resp_data: - raise BadLedgerRequestError( - f"Bad or missing ledger NYM transaction for DID {public_did}" - ) - txn_data_data = txn_resp_data["txn"]["data"] - role_token = Role.get(txn_data_data.get("role")).token() - alias = txn_data_data.get("alias") - await self.register_nym(public_did, verkey, role_token, alias) - - # update wallet - async with self.profile.session() as session: - wallet = session.inject(BaseWallet) - await wallet.rotate_did_keypair_apply(public_did) - - async def get_txn_author_agreement(self, reload: bool = False) -> dict: - """Get the current transaction author agreement, fetching it if necessary.""" - if not self.pool.taa_cache or reload: - self.pool.taa_cache = await self.fetch_txn_author_agreement() - return self.pool.taa_cache - - async def fetch_txn_author_agreement(self) -> dict: - """Fetch the current AML and TAA from the ledger.""" - public_info = await self.get_wallet_public_did() - public_did = public_info.did if public_info else None 
- - get_aml_req = await indy.ledger.build_get_acceptance_mechanisms_request( - public_did, None, None - ) - response_json = await self._submit(get_aml_req, sign_did=public_info) - aml_found = (json.loads(response_json))["result"]["data"] - - get_taa_req = await indy.ledger.build_get_txn_author_agreement_request( - public_did, None - ) - response_json = await self._submit(get_taa_req, sign_did=public_info) - taa_found = (json.loads(response_json))["result"]["data"] - taa_required = bool(taa_found and taa_found["text"]) - if taa_found: - taa_found["digest"] = self.taa_digest( - taa_found["version"], taa_found["text"] - ) - - return { - "aml_record": aml_found, - "taa_record": taa_found, - "taa_required": taa_required, - } - - async def get_indy_storage(self) -> IndySdkStorage: - """Get an IndySdkStorage instance for the current wallet.""" - return IndySdkStorage(self.profile.wallet) - - def taa_rough_timestamp(self) -> int: - """Get a timestamp accurate to the day. - - Anything more accurate is a privacy concern. - """ - return int( - datetime.combine( - date.today(), datetime.min.time(), datetime.timezone.utc - ).timestamp() - ) - - async def accept_txn_author_agreement( - self, taa_record: dict, mechanism: str, accept_time: int = None - ): - """Save a new record recording the acceptance of the TAA.""" - if not accept_time: - accept_time = self.taa_rough_timestamp() - acceptance = { - "text": taa_record["text"], - "version": taa_record["version"], - "digest": taa_record["digest"], - "mechanism": mechanism, - "time": accept_time, - } - record = StorageRecord( - TAA_ACCEPTED_RECORD_TYPE, - json.dumps(acceptance), - {"pool_name": self.pool.name}, - ) - storage = await self.get_indy_storage() - await storage.add_record(record) - if self.pool.cache: - cache_key = ( - TAA_ACCEPTED_RECORD_TYPE - + "::" - + self.profile.name - + "::" - + self.pool.name - + "::" - ) - await self.pool.cache.set(cache_key, acceptance, self.pool.cache_duration) - - async def get_latest_txn_author_acceptance(self) -> dict: - """Look up the latest TAA acceptance.""" - cache_key = ( - TAA_ACCEPTED_RECORD_TYPE - + "::" - + self.profile.name - + "::" - + self.pool.name - + "::" - ) - acceptance = self.pool.cache and await self.pool.cache.get(cache_key) - if not acceptance: - storage = await self.get_indy_storage() - tag_filter = {"pool_name": self.pool.name} - found = await storage.find_all_records(TAA_ACCEPTED_RECORD_TYPE, tag_filter) - if found: - records = [json.loads(record.value) for record in found] - records.sort(key=lambda v: v["time"], reverse=True) - acceptance = records[0] - else: - acceptance = {} - if self.pool.cache: - await self.pool.cache.set( - cache_key, acceptance, self.pool.cache_duration - ) - return acceptance - - async def get_revoc_reg_def(self, revoc_reg_id: str) -> dict: - """Get revocation registry definition by ID; augment with ledger timestamp.""" - public_info = await self.get_wallet_public_did() - try: - fetch_req = await indy.ledger.build_get_revoc_reg_def_request( - public_info and public_info.did, revoc_reg_id - ) - response_json = await self._submit(fetch_req, sign_did=public_info) - ( - found_id, - found_def_json, - ) = await indy.ledger.parse_get_revoc_reg_def_response(response_json) - found_def = json.loads(found_def_json) - found_def["txnTime"] = json.loads(response_json)["result"]["txnTime"] - - except IndyError as e: - LOGGER.error( - f"get_revoc_reg_def failed with revoc_reg_id={revoc_reg_id} - " - f"{e.error_code}: {getattr(e, 'message', '[no message]')}" - ) - raise e - - assert 
found_id == revoc_reg_id - return found_def - - async def get_revoc_reg_entry(self, revoc_reg_id: str, timestamp: int): - """Get revocation registry entry by revocation registry ID and timestamp.""" - public_info = await self.get_wallet_public_did() - with IndyErrorHandler("Exception fetching rev reg entry", LedgerError): - try: - fetch_req = await indy.ledger.build_get_revoc_reg_request( - public_info and public_info.did, revoc_reg_id, timestamp - ) - response_json = await self._submit(fetch_req, sign_did=public_info) - ( - found_id, - found_reg_json, - ledger_timestamp, - ) = await indy.ledger.parse_get_revoc_reg_response(response_json) - except IndyError as e: - LOGGER.error( - f"get_revoc_reg_entry failed with revoc_reg_id={revoc_reg_id} - " - f"{e.error_code}: {getattr(e, 'message', '[no message]')}" - ) - raise e - assert found_id == revoc_reg_id - return json.loads(found_reg_json), ledger_timestamp - - async def get_revoc_reg_delta( - self, revoc_reg_id: str, fro=0, to=None - ) -> Tuple[dict, int]: - """Look up a revocation registry delta by ID. - - :param revoc_reg_id revocation registry id - :param fro earliest EPOCH time of interest - :param to latest EPOCH time of interest - - :returns delta response, delta timestamp - """ - if to is None: - to = int(time()) - public_info = await self.get_wallet_public_did() - with IndyErrorHandler("Exception building rev reg delta request", LedgerError): - fetch_req = await indy.ledger.build_get_revoc_reg_delta_request( - public_info and public_info.did, - revoc_reg_id, - 0 if fro == to else fro, - to, - ) - response_json = await self._submit(fetch_req, sign_did=public_info) - with IndyErrorHandler( - ( - "Exception parsing rev reg delta response " - "(interval ends before rev reg creation?)" - ), - LedgerError, - ): - ( - found_id, - found_delta_json, - delta_timestamp, - ) = await indy.ledger.parse_get_revoc_reg_delta_response(response_json) - assert found_id == revoc_reg_id - return json.loads(found_delta_json), delta_timestamp - - async def send_revoc_reg_def( - self, - revoc_reg_def: dict, - issuer_did: str = None, - write_ledger: bool = True, - endorser_did: str = None, - ) -> dict: - """Publish a revocation registry definition to the ledger.""" - # NOTE - issuer DID could be extracted from the revoc_reg_def ID - if issuer_did: - async with self.profile.session() as session: - wallet = session.inject(BaseWallet) - did_info = await wallet.get_local_did(issuer_did) - else: - did_info = await self.get_wallet_public_did() - if not did_info: - raise LedgerTransactionError( - "No issuer DID found for revocation registry definition" - ) - - if self.profile.context.settings.get("wallet.type") == "askar-anoncreds": - from aries_cloudagent.anoncreds.default.legacy_indy.registry import ( - LegacyIndyRegistry, - ) - - rev_reg_def_req = await self._create_revoc_reg_def_request( - did_info, - json.dumps(revoc_reg_def), - write_ledger=write_ledger, - endorser_did=endorser_did, - ) - legacy_indy_registry = LegacyIndyRegistry() - - resp = await legacy_indy_registry.txn_submit( - self.profile, - rev_reg_def_req, - sign=True, - sign_did=did_info, - write_ledger=write_ledger, - ) - - if not write_ledger: - return revoc_reg_def["id"], {"signed_txn": resp} - - try: - # parse sequence number out of response - seq_no = json.loads(resp)["result"]["txnMetadata"]["seqNo"] - return seq_no - except KeyError as err: - raise LedgerError( - "Failed to parse sequence number from ledger response" - ) from err - else: - with IndyErrorHandler("Exception building rev reg 
def", LedgerError): - request_json = await indy.ledger.build_revoc_reg_def_request( - did_info.did, json.dumps(revoc_reg_def) - ) - - if endorser_did and not write_ledger: - request_json = await indy.ledger.append_request_endorser( - request_json, endorser_did - ) - resp = await self._submit( - request_json, True, sign_did=did_info, write_ledger=write_ledger - ) - - return {"result": resp} - - async def send_revoc_reg_entry( - self, - revoc_reg_id: str, - revoc_def_type: str, - revoc_reg_entry: dict, - issuer_did: str = None, - write_ledger: bool = True, - endorser_did: str = None, - ) -> dict: - """Publish a revocation registry entry to the ledger.""" - if issuer_did: - async with self.profile.session() as session: - wallet = session.inject(BaseWallet) - did_info = await wallet.get_local_did(issuer_did) - else: - did_info = await self.get_wallet_public_did() - if not did_info: - raise LedgerTransactionError( - "No issuer DID found for revocation registry entry" - ) - - if self.profile.context.settings.get("wallet.type") == "askar-anoncreds": - from aries_cloudagent.anoncreds.default.legacy_indy.registry import ( - LegacyIndyRegistry, - ) - - rev_reg_def_entry_req = await self._create_revoc_reg_def_request( - did_info, - json.dumps(revoc_reg_entry), - write_ledger=write_ledger, - endorser_did=endorser_did, - ) - legacy_indy_registry = LegacyIndyRegistry() - - resp = await legacy_indy_registry.txn_submit( - self.profile, - rev_reg_def_entry_req, - sign=True, - sign_did=did_info, - write_ledger=write_ledger, - ) - - if not write_ledger: - return rev_reg_def_entry_req["id"], {"signed_txn": resp} - - try: - # parse sequence number out of response - seq_no = json.loads(resp)["result"]["txnMetadata"]["seqNo"] - return seq_no - except KeyError as err: - raise LedgerError( - "Failed to parse sequence number from ledger response" - ) from err - else: - with IndyErrorHandler("Exception building rev reg entry", LedgerError): - request_json = await indy.ledger.build_revoc_reg_entry_request( - did_info.did, - revoc_reg_id, - revoc_def_type, - json.dumps(revoc_reg_entry), - ) - - if endorser_did and not write_ledger: - request_json = await indy.ledger.append_request_endorser( - request_json, endorser_did - ) - - resp = await self._submit( - request_json, True, sign_did=did_info, write_ledger=write_ledger - ) - return {"result": resp} diff --git a/aries_cloudagent/ledger/indy_vdr.py b/aries_cloudagent/ledger/indy_vdr.py index 7444311343..3e8c2dbb2b 100644 --- a/aries_cloudagent/ledger/indy_vdr.py +++ b/aries_cloudagent/ledger/indy_vdr.py @@ -1092,6 +1092,29 @@ async def get_revoc_reg_delta( ) from err response_value = response["data"]["value"] + accum_to = response_value.get("accum_to") + + # If accum_to is not present, then the timestamp_to was before the registry + # was created. In this case, we need to fetch the registry creation timestamp and + # re-calculate the delta. 
+ if not accum_to: + try: + (_, timestamp) = await self.get_revoc_reg_entry( + revoc_reg_id, int(time()) + ) + fetch_req = ledger.build_get_revoc_reg_delta_request( + public_info and public_info.did, + revoc_reg_id, + timestamp_from, + timestamp, + ) + response = await self._submit(fetch_req, sign_did=public_info) + response_value = response["data"]["value"] + except VdrError as err: + raise LedgerError( + f"get_revoc_reg_delta failed for revoc_reg_id='{revoc_reg_id}'" + ) from err + delta_value = { "accum": response_value["accum_to"]["value"]["accum"], "issued": response_value.get("issued", []), @@ -1147,7 +1170,7 @@ async def send_revoc_reg_def( legacy_indy_registry = LegacyIndyRegistry() resp = await legacy_indy_registry.txn_submit( - self.profile, + self, rev_reg_def_req, sign=True, sign_did=did_info, @@ -1222,7 +1245,7 @@ async def send_revoc_reg_entry( legacy_indy_registry = LegacyIndyRegistry() resp = await legacy_indy_registry.txn_submit( - self.profile, + self, revoc_reg_entry_req, sign=True, sign_did=did_info, diff --git a/aries_cloudagent/ledger/multiple_ledger/indy_manager.py b/aries_cloudagent/ledger/multiple_ledger/indy_manager.py deleted file mode 100644 index 49c52355ab..0000000000 --- a/aries_cloudagent/ledger/multiple_ledger/indy_manager.py +++ /dev/null @@ -1,269 +0,0 @@ -"""Multiple IndySdkLedger Manager.""" - -import asyncio -import concurrent.futures -import json -import logging -from collections import OrderedDict -from typing import List, Mapping, Optional, Tuple - -from ...cache.base import BaseCache -from ...core.profile import Profile -from ...ledger.base import BaseLedger -from ...ledger.error import LedgerError -from ...wallet.crypto import did_is_self_certified -from ..indy import IndySdkLedger -from ..merkel_validation.domain_txn_handler import ( - get_proof_nodes, - prepare_for_state_read, -) -from ..merkel_validation.trie import SubTrie -from .base_manager import BaseMultipleLedgerManager, MultipleLedgerManagerError - -LOGGER = logging.getLogger(__name__) - - -class MultiIndyLedgerManager(BaseMultipleLedgerManager): - """Multiple Indy SDK Ledger Manager.""" - - def __init__( - self, - profile: Profile, - production_ledgers: Optional[OrderedDict] = None, - non_production_ledgers: Optional[OrderedDict] = None, - writable_ledgers: Optional[set] = None, - endorser_map: Optional[dict] = None, - cache_ttl: int = None, - ): - """Initialize MultiIndyLedgerManager. 
- - Args: - profile: The base profile for this manager - production_ledgers: production IndySdkLedger mapping - non_production_ledgers: non_production IndySdkLedger mapping - cache_ttl: Time in sec to persist did_ledger_id_resolver cache keys - - """ - self.profile = profile - self.production_ledgers = production_ledgers or OrderedDict() - self.non_production_ledgers = non_production_ledgers or OrderedDict() - self.writable_ledgers = writable_ledgers or set() - self.endorser_map = endorser_map or {} - self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=5) - self.cache_ttl = cache_ttl - - async def get_write_ledgers(self) -> List[str]: - """Return the write IndySdkLedger instance.""" - return list(self.writable_ledgers) - - def get_endorser_info_for_ledger(self, ledger_id: str) -> Optional[Tuple[str, str]]: - """Return endorser alias, did tuple for provided ledger, if available.""" - endorser_info = self.endorser_map.get(ledger_id) - if not endorser_info: - return None - return (endorser_info["endorser_alias"], endorser_info["endorser_did"]) - - async def get_ledger_inst_by_id(self, ledger_id: str) -> Optional[BaseLedger]: - """Return BaseLedger instance.""" - return self.production_ledgers.get( - ledger_id - ) or self.non_production_ledgers.get(ledger_id) - - async def get_prod_ledgers(self) -> Mapping: - """Return production ledgers mapping.""" - return self.production_ledgers - - async def get_nonprod_ledgers(self) -> Mapping: - """Return non_production ledgers mapping.""" - return self.non_production_ledgers - - async def get_ledger_id_by_ledger_pool_name(self, pool_name: str) -> str: - """Return ledger_id by ledger pool name.""" - multi_ledgers = self.production_ledgers | self.non_production_ledgers - for ledger_id, indy_vdr_ledger in multi_ledgers.items(): - if indy_vdr_ledger.pool_name == pool_name: - return ledger_id - raise MultipleLedgerManagerError( - f"Provided Ledger pool name {pool_name} not found " - "in either production_ledgers or non_production_ledgers" - ) - - async def _get_ledger_by_did( - self, - ledger_id: str, - did: str, - ) -> Optional[Tuple[str, IndySdkLedger, bool]]: - """Build and submit GET_NYM request and process response. - - Successful response return tuple with ledger_id, IndySdkLedger instance - and is_self_certified bool flag. Unsuccessful response return None. 
- - Args: - ledger_id: provided ledger_id to retrieve IndySdkLedger instance - from production_ledgers or non_production_ledgers - did: provided DID - - Return: - (str, IndySdkLedger, bool) or None - """ - try: - indy_sdk_ledger = None - if ledger_id in self.production_ledgers: - indy_sdk_ledger = self.production_ledgers.get(ledger_id) - else: - indy_sdk_ledger = self.non_production_ledgers.get(ledger_id) - async with indy_sdk_ledger: - request = await indy_sdk_ledger.build_and_return_get_nym_request( - None, did - ) - response_json = await asyncio.wait_for( - indy_sdk_ledger.submit_get_nym_request(request), 10 - ) - response = json.loads(response_json) - data = response.get("result", {}).get("data") - if not data: - LOGGER.warning(f"Did {did} not posted to ledger {ledger_id}") - return None - if isinstance(data, str): - data = json.loads(data) - if not await SubTrie.verify_spv_proof( - expected_value=prepare_for_state_read(response), - proof_nodes=get_proof_nodes(response), - ): - LOGGER.warning( - f"State Proof validation failed for Did {did} " - f"and ledger {ledger_id}" - ) - return None - if did_is_self_certified(did, data.get("verkey")): - return (ledger_id, indy_sdk_ledger, True) - return (ledger_id, indy_sdk_ledger, False) - except asyncio.TimeoutError: - LOGGER.exception( - f"get-nym request timedout for Did {did} and " - f"ledger {ledger_id}, reply not received within 10 sec" - ) - return None - except LedgerError as err: - LOGGER.error( - "Exception when building and submitting get-nym request, " - f"for Did {did} and ledger {ledger_id}, {err}" - ) - return None - - async def lookup_did_in_configured_ledgers( - self, did: str, cache_did: bool = True - ) -> Tuple[str, IndySdkLedger]: - """Lookup given DID in configured ledgers in parallel.""" - self.cache = self.profile.inject_or(BaseCache) - cache_key = f"did_ledger_id_resolver::{did}" - if bool(cache_did and self.cache and await self.cache.get(cache_key)): - cached_ledger_id = await self.cache.get(cache_key) - if cached_ledger_id in self.production_ledgers: - return (cached_ledger_id, self.production_ledgers.get(cached_ledger_id)) - elif cached_ledger_id in self.non_production_ledgers: - return ( - cached_ledger_id, - self.non_production_ledgers.get(cached_ledger_id), - ) - else: - raise MultipleLedgerManagerError( - f"cached ledger_id {cached_ledger_id} not found in either " - "production_ledgers or non_production_ledgers" - ) - applicable_prod_ledgers = {"self_certified": {}, "non_self_certified": {}} - applicable_non_prod_ledgers = {"self_certified": {}, "non_self_certified": {}} - ledger_ids = list(self.production_ledgers.keys()) + list( - self.non_production_ledgers.keys() - ) - coro_futures = { - self.executor.submit(self._get_ledger_by_did, ledger_id, did): ledger_id - for ledger_id in ledger_ids - } - for coro_future in concurrent.futures.as_completed(coro_futures): - result = await coro_future.result() - if result: - applicable_ledger_id = result[0] - applicable_ledger_inst = result[1] - is_self_certified = result[2] - if applicable_ledger_id in self.production_ledgers: - insert_key = list(self.production_ledgers).index( - applicable_ledger_id - ) - if is_self_certified: - applicable_prod_ledgers["self_certified"][insert_key] = ( - applicable_ledger_id, - applicable_ledger_inst, - ) - else: - applicable_prod_ledgers["non_self_certified"][insert_key] = ( - applicable_ledger_id, - applicable_ledger_inst, - ) - else: - insert_key = list(self.non_production_ledgers).index( - applicable_ledger_id - ) - if is_self_certified: 
- applicable_non_prod_ledgers["self_certified"][insert_key] = ( - applicable_ledger_id, - applicable_ledger_inst, - ) - else: - applicable_non_prod_ledgers["non_self_certified"][ - insert_key - ] = (applicable_ledger_id, applicable_ledger_inst) - applicable_prod_ledgers["self_certified"] = OrderedDict( - sorted(applicable_prod_ledgers.get("self_certified").items()) - ) - applicable_prod_ledgers["non_self_certified"] = OrderedDict( - sorted(applicable_prod_ledgers.get("non_self_certified").items()) - ) - applicable_non_prod_ledgers["self_certified"] = OrderedDict( - sorted(applicable_non_prod_ledgers.get("self_certified").items()) - ) - applicable_non_prod_ledgers["non_self_certified"] = OrderedDict( - sorted(applicable_non_prod_ledgers.get("non_self_certified").items()) - ) - if len(applicable_prod_ledgers.get("self_certified")) > 0: - successful_ledger_inst = list( - applicable_prod_ledgers.get("self_certified").values() - )[0] - if cache_did and self.cache: - await self.cache.set( - cache_key, successful_ledger_inst[0], self.cache_ttl - ) - return successful_ledger_inst - elif len(applicable_non_prod_ledgers.get("self_certified")) > 0: - successful_ledger_inst = list( - applicable_non_prod_ledgers.get("self_certified").values() - )[0] - if cache_did and self.cache: - await self.cache.set( - cache_key, successful_ledger_inst[0], self.cache_ttl - ) - return successful_ledger_inst - elif len(applicable_prod_ledgers.get("non_self_certified")) > 0: - successful_ledger_inst = list( - applicable_prod_ledgers.get("non_self_certified").values() - )[0] - if cache_did and self.cache: - await self.cache.set( - cache_key, successful_ledger_inst[0], self.cache_ttl - ) - return successful_ledger_inst - elif len(applicable_non_prod_ledgers.get("non_self_certified")) > 0: - successful_ledger_inst = list( - applicable_non_prod_ledgers.get("non_self_certified").values() - )[0] - if cache_did and self.cache: - await self.cache.set( - cache_key, successful_ledger_inst[0], self.cache_ttl - ) - return successful_ledger_inst - else: - raise MultipleLedgerManagerError( - f"DID {did} not found in any of the ledgers total: " - f"(production: {len(self.production_ledgers)}, " - f"non_production: {len(self.non_production_ledgers)})" - ) diff --git a/aries_cloudagent/ledger/multiple_ledger/ledger_config_schema.py b/aries_cloudagent/ledger/multiple_ledger/ledger_config_schema.py index 1981b6aa62..89901826b7 100644 --- a/aries_cloudagent/ledger/multiple_ledger/ledger_config_schema.py +++ b/aries_cloudagent/ledger/multiple_ledger/ledger_config_schema.py @@ -1,8 +1,7 @@ """Schema for configuring multiple ledgers.""" -import uuid - from marshmallow import EXCLUDE, fields, pre_load +from uuid_utils import uuid4 from ...messaging.models.base import BaseModel, BaseModelSchema from ...messaging.models.openapi import OpenAPISchema @@ -56,7 +55,7 @@ class Meta: def validate_id(self, data, **kwargs): """Check if id is present, if not then set to UUID4.""" if "id" not in data: - data["id"] = str(uuid.uuid4()) + data["id"] = str(uuid4()) return data diff --git a/aries_cloudagent/ledger/multiple_ledger/manager_provider.py b/aries_cloudagent/ledger/multiple_ledger/manager_provider.py index 3a6799034f..13a88a79fd 100644 --- a/aries_cloudagent/ledger/multiple_ledger/manager_provider.py +++ b/aries_cloudagent/ledger/multiple_ledger/manager_provider.py @@ -20,12 +20,6 @@ class MultiIndyLedgerManagerProvider(BaseProvider): """Multiple Indy ledger support manager provider.""" MANAGER_TYPES = { - "basic": ( - DeferLoad( - 
"aries_cloudagent.ledger.multiple_ledger." - "indy_manager.MultiIndyLedgerManager" - ) - ), "askar-profile": ( DeferLoad( "aries_cloudagent.ledger.multiple_ledger." @@ -34,10 +28,6 @@ class MultiIndyLedgerManagerProvider(BaseProvider): ), } LEDGER_TYPES = { - "basic": { - "pool": DeferLoad("aries_cloudagent.ledger.indy.IndySdkLedgerPool"), - "ledger": DeferLoad("aries_cloudagent.ledger.indy.IndySdkLedger"), - }, "askar-profile": { "pool": DeferLoad("aries_cloudagent.ledger.indy_vdr.IndyVdrLedgerPool"), "ledger": DeferLoad("aries_cloudagent.ledger.indy_vdr.IndyVdrLedger"), @@ -52,14 +42,11 @@ def __init__(self, root_profile): def provide(self, settings: BaseSettings, injector: BaseInjector): """Create the multiple Indy ledger manager instance.""" - if self.root_profile.BACKEND_NAME == "indy": - manager_type = "basic" - elif self.root_profile.BACKEND_NAME == "askar": + if self.root_profile.BACKEND_NAME == "askar": manager_type = "askar-profile" else: raise MultipleLedgerManagerError( - "MultiIndyLedgerManagerProvider expects an IndySdkProfile [indy] " - " or AskarProfile [indy_vdr] as root_profile" + f"Unexpected wallet backend: {self.root_profile.BACKEND_NAME}" ) if manager_type not in self._inst: @@ -68,102 +55,53 @@ def provide(self, settings: BaseSettings, injector: BaseInjector): ledger_class = self.LEDGER_TYPES[manager_type]["ledger"] LOGGER.info("Create multiple Indy ledger manager: %s", manager_type) try: - if manager_type == "basic": - indy_sdk_production_ledgers = OrderedDict() - indy_sdk_non_production_ledgers = OrderedDict() - ledger_config_list = settings.get_value("ledger.ledger_config_list") - ledger_endorser_map = {} - write_ledgers = set() - for config in ledger_config_list: - keepalive = config.get("keepalive") - read_only = config.get("read_only") - socks_proxy = config.get("socks_proxy") - genesis_transactions = config.get("genesis_transactions") - cache = injector.inject_or(BaseCache) - ledger_id = config.get("id") - pool_name = config.get("pool_name") - ledger_is_production = config.get("is_production") - ledger_is_write = config.get("is_write") - ledger_endorser_alias = config.get("endorser_alias") - ledger_endorser_did = config.get("endorser_did") - ledger_pool = pool_class( - pool_name, - keepalive=keepalive, - cache=cache, - genesis_transactions=genesis_transactions, - read_only=read_only, - socks_proxy=socks_proxy, - ) - ledger_instance = ledger_class( - pool=ledger_pool, - profile=self.root_profile, - ) - if ledger_is_write: - write_ledgers.add(ledger_id) - if ledger_is_production: - indy_sdk_production_ledgers[ledger_id] = ledger_instance - else: - indy_sdk_non_production_ledgers[ledger_id] = ledger_instance - if ledger_endorser_alias and ledger_endorser_did: - ledger_endorser_map[ledger_id] = { - "endorser_alias": ledger_endorser_alias, - "endorser_did": ledger_endorser_did, - } - self._inst[manager_type] = manager_class( - self.root_profile, - production_ledgers=indy_sdk_production_ledgers, - non_production_ledgers=indy_sdk_non_production_ledgers, - writable_ledgers=write_ledgers, - endorser_map=ledger_endorser_map, + indy_vdr_production_ledgers = OrderedDict() + indy_vdr_non_production_ledgers = OrderedDict() + ledger_config_list = settings.get_value("ledger.ledger_config_list") + ledger_endorser_map = {} + write_ledgers = set() + for config in ledger_config_list: + keepalive = config.get("keepalive") + read_only = config.get("read_only") + socks_proxy = config.get("socks_proxy") + genesis_transactions = config.get("genesis_transactions") + cache = 
injector.inject_or(BaseCache) + ledger_id = config.get("id") + pool_name = config.get("pool_name") + ledger_is_production = config.get("is_production") + ledger_is_write = config.get("is_write") + ledger_endorser_alias = config.get("endorser_alias") + ledger_endorser_did = config.get("endorser_did") + ledger_pool = pool_class( + pool_name, + keepalive=keepalive, + cache=cache, + genesis_transactions=genesis_transactions, + read_only=read_only, + socks_proxy=socks_proxy, ) - else: - indy_vdr_production_ledgers = OrderedDict() - indy_vdr_non_production_ledgers = OrderedDict() - ledger_config_list = settings.get_value("ledger.ledger_config_list") - ledger_endorser_map = {} - write_ledgers = set() - for config in ledger_config_list: - keepalive = config.get("keepalive") - read_only = config.get("read_only") - socks_proxy = config.get("socks_proxy") - genesis_transactions = config.get("genesis_transactions") - cache = injector.inject_or(BaseCache) - ledger_id = config.get("id") - pool_name = config.get("pool_name") - ledger_is_production = config.get("is_production") - ledger_is_write = config.get("is_write") - ledger_endorser_alias = config.get("endorser_alias") - ledger_endorser_did = config.get("endorser_did") - ledger_pool = pool_class( - pool_name, - keepalive=keepalive, - cache=cache, - genesis_transactions=genesis_transactions, - read_only=read_only, - socks_proxy=socks_proxy, - ) - ledger_instance = ledger_class( - pool=ledger_pool, - profile=self.root_profile, - ) - if ledger_is_write: - write_ledgers.add(ledger_id) - if ledger_is_production: - indy_vdr_production_ledgers[ledger_id] = ledger_instance - else: - indy_vdr_non_production_ledgers[ledger_id] = ledger_instance - if ledger_endorser_alias and ledger_endorser_did: - ledger_endorser_map[ledger_id] = { - "endorser_alias": ledger_endorser_alias, - "endorser_did": ledger_endorser_did, - } - self._inst[manager_type] = manager_class( - self.root_profile, - production_ledgers=indy_vdr_production_ledgers, - non_production_ledgers=indy_vdr_non_production_ledgers, - writable_ledgers=write_ledgers, - endorser_map=ledger_endorser_map, + ledger_instance = ledger_class( + pool=ledger_pool, + profile=self.root_profile, ) + if ledger_is_write: + write_ledgers.add(ledger_id) + if ledger_is_production: + indy_vdr_production_ledgers[ledger_id] = ledger_instance + else: + indy_vdr_non_production_ledgers[ledger_id] = ledger_instance + if ledger_endorser_alias and ledger_endorser_did: + ledger_endorser_map[ledger_id] = { + "endorser_alias": ledger_endorser_alias, + "endorser_did": ledger_endorser_did, + } + self._inst[manager_type] = manager_class( + self.root_profile, + production_ledgers=indy_vdr_production_ledgers, + non_production_ledgers=indy_vdr_non_production_ledgers, + writable_ledgers=write_ledgers, + endorser_map=ledger_endorser_map, + ) except ClassNotFoundError as err: raise InjectionError( f"Unknown multiple Indy ledger manager type: {manager_type}" diff --git a/aries_cloudagent/ledger/multiple_ledger/tests/test_indy_ledger_requests.py b/aries_cloudagent/ledger/multiple_ledger/tests/test_indy_ledger_requests.py index cb7ebf0529..6b1295386b 100644 --- a/aries_cloudagent/ledger/multiple_ledger/tests/test_indy_ledger_requests.py +++ b/aries_cloudagent/ledger/multiple_ledger/tests/test_indy_ledger_requests.py @@ -8,7 +8,7 @@ BaseMultipleLedgerManager, MultipleLedgerManagerError, ) -from ...indy import IndySdkLedger, IndySdkLedgerPool +from ...indy_vdr import IndyVdrLedger, IndyVdrLedgerPool from ..ledger_requests_executor import 
IndyLedgerRequestsExecutor @@ -26,9 +26,7 @@ async def asyncSetUp(self): "genesis_transactions": "genesis_transactions", } ] - self.ledger = IndySdkLedger( - IndySdkLedgerPool("test_prod_1", checked=True), self.profile - ) + self.ledger = IndyVdrLedger(IndyVdrLedgerPool("test_prod_1"), self.profile) self.profile.context.injector.bind_instance( BaseMultipleLedgerManager, mock.MagicMock( diff --git a/aries_cloudagent/ledger/multiple_ledger/tests/test_indy_manager.py b/aries_cloudagent/ledger/multiple_ledger/tests/test_indy_manager.py deleted file mode 100644 index f280c8b06c..0000000000 --- a/aries_cloudagent/ledger/multiple_ledger/tests/test_indy_manager.py +++ /dev/null @@ -1,528 +0,0 @@ -import asyncio -from copy import deepcopy -import pytest -import json - -from unittest import IsolatedAsyncioTestCase -from aries_cloudagent.tests import mock - -from collections import OrderedDict - -from ....cache.base import BaseCache -from ....cache.in_memory import InMemoryCache -from ....core.in_memory import InMemoryProfile -from ....ledger.base import BaseLedger -from ....messaging.responder import BaseResponder - -from ...error import LedgerError -from ...indy import IndySdkLedger, IndySdkLedgerPool -from ...merkel_validation.tests.test_data import GET_NYM_REPLY - -from .. import indy_manager as test_module -from ..base_manager import MultipleLedgerManagerError -from ..indy_manager import MultiIndyLedgerManager - - -@pytest.mark.indy -class TestMultiIndyLedgerManager(IsolatedAsyncioTestCase): - async def asyncSetUp(self): - self.profile = InMemoryProfile.test_profile(bind={BaseCache: InMemoryCache()}) - self.context = self.profile.context - setattr(self.context, "profile", self.profile) - self.responder = mock.CoroutineMock(send=mock.CoroutineMock()) - self.context.injector.bind_instance(BaseResponder, self.responder) - self.production_ledger = OrderedDict() - self.non_production_ledger = OrderedDict() - test_prod_ledger = IndySdkLedger( - IndySdkLedgerPool("test_prod_1", checked=True), self.profile - ) - writable_ledgers = set() - self.context.injector.bind_instance(BaseLedger, test_prod_ledger) - self.production_ledger["test_prod_1"] = test_prod_ledger - self.production_ledger["test_prod_2"] = IndySdkLedger( - IndySdkLedgerPool("test_prod_2", checked=True), self.profile - ) - self.non_production_ledger["test_non_prod_1"] = IndySdkLedger( - IndySdkLedgerPool("test_non_prod_1", checked=True), self.profile - ) - self.non_production_ledger["test_non_prod_2"] = IndySdkLedger( - IndySdkLedgerPool("test_non_prod_2", checked=True), self.profile - ) - writable_ledgers.add("test_prod_1") - writable_ledgers.add("test_prod_2") - self.manager = MultiIndyLedgerManager( - self.profile, - production_ledgers=self.production_ledger, - non_production_ledgers=self.non_production_ledger, - writable_ledgers=writable_ledgers, - ) - - def test_get_endorser_info_for_ledger(self): - writable_ledgers = set() - writable_ledgers.add("test_prod_1") - writable_ledgers.add("test_prod_2") - - endorser_info_map = {} - endorser_info_map["test_prod_1"] = { - "endorser_did": "test_public_did_1", - "endorser_alias": "endorser_1", - } - endorser_info_map["test_prod_2"] = { - "endorser_did": "test_public_did_2", - "endorser_alias": "endorser_2", - } - manager = MultiIndyLedgerManager( - self.profile, - production_ledgers=self.production_ledger, - non_production_ledgers=self.non_production_ledger, - writable_ledgers=writable_ledgers, - endorser_map=endorser_info_map, - ) - assert ( - "endorser_1" - ), "test_public_did_1" == 
manager.get_endorser_info_for_ledger("test_prod_1") - assert ( - "endorser_2" - ), "test_public_did_2" == manager.get_endorser_info_for_ledger("test_prod_2") - - async def test_get_write_ledgers(self): - ledger_ids = await self.manager.get_write_ledgers() - assert "test_prod_1" in ledger_ids - assert "test_prod_2" in ledger_ids - - async def test_get_write_ledger_from_base_ledger(self): - ledger_id = await self.manager.get_ledger_id_by_ledger_pool_name("test_prod_1") - assert ledger_id == "test_prod_1" - - async def test_set_profile_write_ledger(self): - writable_ledgers = set() - writable_ledgers.add("test_prod_1") - writable_ledgers.add("test_prod_2") - endorser_info_map = {} - endorser_info_map["test_prod_2"] = { - "endorser_did": "test_public_did_2", - "endorser_alias": "endorser_2", - } - manager = MultiIndyLedgerManager( - self.profile, - production_ledgers=self.production_ledger, - non_production_ledgers=self.non_production_ledger, - writable_ledgers=writable_ledgers, - endorser_map=endorser_info_map, - ) - profile = InMemoryProfile.test_profile() - assert not profile.inject_or(BaseLedger) - assert "test_prod_2" in manager.writable_ledgers - new_write_ledger_id = await manager.set_profile_write_ledger( - profile=profile, ledger_id="test_prod_2" - ) - assert new_write_ledger_id == "test_prod_2" - new_write_ledger = profile.inject_or(BaseLedger) - assert new_write_ledger.pool_name == "test_prod_2" - - async def test_set_profile_write_ledger_x(self): - profile = InMemoryProfile.test_profile() - with self.assertRaises(MultipleLedgerManagerError) as cm: - new_write_ledger_id = await self.manager.set_profile_write_ledger( - profile=profile, ledger_id="test_non_prod_1" - ) - assert "is not write configurable" in str(cm.exception.message) - - async def test_get_ledger_inst_by_id(self): - ledger_inst = await self.manager.get_ledger_inst_by_id("test_prod_2") - assert ledger_inst - ledger_inst = await self.manager.get_ledger_inst_by_id("test_non_prod_2") - assert ledger_inst - ledger_inst = await self.manager.get_ledger_inst_by_id("test_invalid") - assert not ledger_inst - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_get_ledger_by_did_self_cert_a( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - with mock.patch.object( - test_module.asyncio, "wait", mock.CoroutineMock() - ) as mock_wait: - mock_build_get_nym_req.return_value = mock.MagicMock() - mock_submit.return_value = json.dumps(GET_NYM_REPLY) - mock_wait.return_value = mock_submit.return_value - ( - ledger_id, - ledger_inst, - is_self_certified, - ) = await self.manager._get_ledger_by_did( - "test_prod_1", "Av63wJYM7xYR4AiygYq4c3" - ) - assert ledger_id == "test_prod_1" - assert ledger_inst.pool.name == "test_prod_1" - assert is_self_certified - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_get_ledger_by_did_self_cert_b( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - self.non_production_ledger = OrderedDict() - self.non_production_ledger["test_non_prod_1"] = IndySdkLedger( - 
IndySdkLedgerPool("test_non_prod_1", checked=True), self.profile - ) - self.non_production_ledger["test_non_prod_2"] = IndySdkLedger( - IndySdkLedgerPool("test_non_prod_2", checked=True), self.profile - ) - self.manager = MultiIndyLedgerManager( - self.profile, - non_production_ledgers=self.non_production_ledger, - ) - with mock.patch.object( - test_module.asyncio, "wait", mock.CoroutineMock() - ) as mock_wait: - mock_build_get_nym_req.return_value = mock.MagicMock() - mock_submit.return_value = json.dumps(GET_NYM_REPLY) - mock_wait.return_value = mock_submit.return_value - ( - ledger_id, - ledger_inst, - is_self_certified, - ) = await self.manager._get_ledger_by_did( - "test_non_prod_1", "Av63wJYM7xYR4AiygYq4c3" - ) - assert ledger_id == "test_non_prod_1" - assert ledger_inst.pool.name == "test_non_prod_1" - assert is_self_certified - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_get_ledger_by_did_not_self_cert( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - get_nym_reply = deepcopy(GET_NYM_REPLY) - get_nym_reply["result"]["data"] = json.dumps( - { - "dest": "Av63wJYM7xYR4AiygYq4c3", - "identifier": "V4SGRU86Z58d6TV7PBUe6f", - "role": "101", - "seqNo": 17794, - "txnTime": 1632262244, - "verkey": "ABUF7uxYTxZ6qYdZ4G9e1Gi", - } - ) - with mock.patch.object( - test_module.asyncio, "wait", mock.CoroutineMock() - ) as mock_wait, mock.patch.object( - test_module.SubTrie, "verify_spv_proof", mock.CoroutineMock() - ) as mock_verify_spv_proof: - mock_build_get_nym_req.return_value = mock.MagicMock() - mock_submit.return_value = json.dumps(get_nym_reply) - mock_wait.return_value = mock_submit.return_value - mock_verify_spv_proof.return_value = True - ( - ledger_id, - ledger_inst, - is_self_certified, - ) = await self.manager._get_ledger_by_did( - "test_prod_1", "Av63wJYM7xYR4AiygYq4c3" - ) - assert ledger_id == "test_prod_1" - assert ledger_inst.pool.name == "test_prod_1" - assert not is_self_certified - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_get_ledger_by_did_state_proof_not_valid( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - get_nym_reply = deepcopy(GET_NYM_REPLY) - get_nym_reply["result"]["data"]["verkey"] = "ABUF7uxYTxZ6qYdZ4G9e1Gi" - with mock.patch.object( - test_module.asyncio, "wait", mock.CoroutineMock() - ) as mock_wait: - mock_build_get_nym_req.return_value = mock.MagicMock() - mock_submit.return_value = json.dumps(get_nym_reply) - mock_wait.return_value = mock_submit.return_value - assert not await self.manager._get_ledger_by_did( - "test_prod_1", "Av63wJYM7xYR4AiygYq4c3" - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_get_ledger_by_did_no_data( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - get_nym_reply = deepcopy(GET_NYM_REPLY) - get_nym_reply.get("result").pop("data") - 
with mock.patch.object( - test_module.asyncio, "wait", mock.CoroutineMock() - ) as mock_wait: - mock_build_get_nym_req.return_value = mock.MagicMock() - mock_submit.return_value = json.dumps(get_nym_reply) - mock_wait.return_value = mock_submit.return_value - assert not await self.manager._get_ledger_by_did( - "test_prod_1", "Av63wJYM7xYR4AiygYq4c3" - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_get_ledger_by_did_timeout( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - mock_build_get_nym_req.return_value = mock.MagicMock() - mock_submit.side_effect = asyncio.TimeoutError - assert not await self.manager._get_ledger_by_did( - "test_prod_1", "Av63wJYM7xYR4AiygYq4c3" - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_get_ledger_by_did_ledger_error( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - mock_build_get_nym_req.return_value = mock.MagicMock() - mock_submit.side_effect = LedgerError - assert not await self.manager._get_ledger_by_did( - "test_prod_1", "Av63wJYM7xYR4AiygYq4c3" - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_lookup_did_in_configured_ledgers_self_cert_prod( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - with mock.patch.object( - test_module.asyncio, "wait", mock.CoroutineMock() - ) as mock_wait: - mock_build_get_nym_req.return_value = mock.MagicMock() - mock_submit.return_value = json.dumps(GET_NYM_REPLY) - mock_wait.return_value = mock_submit.return_value - ( - ledger_id, - ledger_inst, - ) = await self.manager.lookup_did_in_configured_ledgers( - "Av63wJYM7xYR4AiygYq4c3", cache_did=True - ) - assert ledger_id == "test_prod_1" - assert ledger_inst.pool.name == "test_prod_1" - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_get_ledger_by_did_not_self_cert_not_self_cert_prod( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - get_nym_reply = deepcopy(GET_NYM_REPLY) - get_nym_reply["result"]["data"]["verkey"] = "ABUF7uxYTxZ6qYdZ4G9e1Gi" - with mock.patch.object( - test_module.asyncio, "wait", mock.CoroutineMock() - ) as mock_wait, mock.patch.object( - test_module.SubTrie, "verify_spv_proof", mock.CoroutineMock() - ) as mock_verify_spv_proof: - mock_build_get_nym_req.return_value = mock.MagicMock() - mock_submit.return_value = json.dumps(get_nym_reply) - mock_wait.return_value = mock_submit.return_value - mock_verify_spv_proof.return_value = True - ( - ledger_id, - ledger_inst, - ) = await self.manager.lookup_did_in_configured_ledgers( - "Av63wJYM7xYR4AiygYq4c3", cache_did=True - ) - assert ledger_id == 
"test_prod_1" - assert ledger_inst.pool.name == "test_prod_1" - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_lookup_did_in_configured_ledgers_self_cert_non_prod( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - self.non_production_ledger = OrderedDict() - self.non_production_ledger["test_non_prod_1"] = IndySdkLedger( - IndySdkLedgerPool("test_non_prod_1", checked=True), self.profile - ) - self.non_production_ledger["test_non_prod_2"] = IndySdkLedger( - IndySdkLedgerPool("test_non_prod_2", checked=True), self.profile - ) - self.manager = MultiIndyLedgerManager( - self.profile, - non_production_ledgers=self.non_production_ledger, - ) - with mock.patch.object( - test_module.asyncio, "wait", mock.CoroutineMock() - ) as mock_wait: - mock_build_get_nym_req.return_value = mock.MagicMock() - mock_submit.return_value = json.dumps(GET_NYM_REPLY) - mock_wait.return_value = mock_submit.return_value - ( - ledger_id, - ledger_inst, - ) = await self.manager.lookup_did_in_configured_ledgers( - "Av63wJYM7xYR4AiygYq4c3", cache_did=True - ) - assert ledger_id == "test_non_prod_1" - assert ledger_inst.pool.name == "test_non_prod_1" - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_get_ledger_by_did_not_self_cert_not_self_cert_non_prod( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - self.non_production_ledger = OrderedDict() - self.non_production_ledger["test_non_prod_1"] = IndySdkLedger( - IndySdkLedgerPool("test_non_prod_1", checked=True), self.profile - ) - self.non_production_ledger["test_non_prod_2"] = IndySdkLedger( - IndySdkLedgerPool("test_non_prod_2", checked=True), self.profile - ) - self.manager = MultiIndyLedgerManager( - self.profile, - non_production_ledgers=self.non_production_ledger, - ) - get_nym_reply = deepcopy(GET_NYM_REPLY) - get_nym_reply["result"]["data"]["verkey"] = "ABUF7uxYTxZ6qYdZ4G9e1Gi" - with mock.patch.object( - test_module.asyncio, "wait", mock.CoroutineMock() - ) as mock_wait, mock.patch.object( - test_module.SubTrie, "verify_spv_proof", mock.CoroutineMock() - ) as mock_verify_spv_proof: - mock_build_get_nym_req.return_value = mock.MagicMock() - mock_submit.return_value = json.dumps(get_nym_reply) - mock_wait.return_value = mock_submit.return_value - mock_verify_spv_proof.return_value = True - ( - ledger_id, - ledger_inst, - ) = await self.manager.lookup_did_in_configured_ledgers( - "Av63wJYM7xYR4AiygYq4c3", cache_did=True - ) - assert ledger_id == "test_non_prod_1" - assert ledger_inst.pool.name == "test_non_prod_1" - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_lookup_did_in_configured_ledgers_x( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - with mock.patch.object( - test_module.asyncio, "wait", mock.CoroutineMock() - ) as mock_wait, mock.patch.object( - test_module.SubTrie, 
"verify_spv_proof", mock.CoroutineMock() - ) as mock_verify_spv_proof: - mock_build_get_nym_req.return_value = mock.MagicMock() - mock_submit.return_value = json.dumps(GET_NYM_REPLY) - mock_wait.return_value = mock_submit.return_value - mock_verify_spv_proof.return_value = False - with self.assertRaises(MultipleLedgerManagerError) as cm: - await self.manager.lookup_did_in_configured_ledgers( - "Av63wJYM7xYR4AiygYq4c3", cache_did=True - ) - assert "not found in any of the ledgers total: (production: " in cm - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_lookup_did_in_configured_ledgers_prod_not_cached( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - with mock.patch.object( - test_module.asyncio, "wait", mock.CoroutineMock() - ) as mock_wait: - mock_build_get_nym_req.return_value = mock.MagicMock() - mock_submit.return_value = json.dumps(GET_NYM_REPLY) - mock_wait.return_value = mock_submit.return_value - ( - ledger_id, - ledger_inst, - ) = await self.manager.lookup_did_in_configured_ledgers( - "Av63wJYM7xYR4AiygYq4c3", cache_did=False - ) - assert ledger_id == "test_prod_1" - assert ledger_inst.pool.name == "test_prod_1" - - async def test_lookup_did_in_configured_ledgers_cached_prod_ledger(self): - cache = InMemoryCache() - await cache.set("did_ledger_id_resolver::Av63wJYM7xYR4AiygYq4c3", "test_prod_2") - self.profile.context.injector.bind_instance(BaseCache, cache) - ( - ledger_id, - ledger_inst, - ) = await self.manager.lookup_did_in_configured_ledgers( - "Av63wJYM7xYR4AiygYq4c3", cache_did=True - ) - assert ledger_id == "test_prod_2" - assert ledger_inst.pool.name == "test_prod_2" - - async def test_lookup_did_in_configured_ledgers_cached_non_prod_ledger(self): - cache = InMemoryCache() - await cache.set( - "did_ledger_id_resolver::Av63wJYM7xYR4AiygYq4c3", "test_non_prod_2", None - ) - self.profile.context.injector.bind_instance(BaseCache, cache) - ( - ledger_id, - ledger_inst, - ) = await self.manager.lookup_did_in_configured_ledgers( - "Av63wJYM7xYR4AiygYq4c3", cache_did=True - ) - assert ledger_id == "test_non_prod_2" - assert ledger_inst.pool.name == "test_non_prod_2" - - async def test_lookup_did_in_configured_ledgers_cached_x(self): - cache = InMemoryCache() - await cache.set("did_ledger_id_resolver::Av63wJYM7xYR4AiygYq4c3", "invalid_id") - self.profile.context.injector.bind_instance(BaseCache, cache) - with self.assertRaises(MultipleLedgerManagerError) as cm: - await self.manager.lookup_did_in_configured_ledgers( - "Av63wJYM7xYR4AiygYq4c3", cache_did=True - ) - assert "cached ledger_id invalid_id not found in either" in cm - - def test_extract_did_from_identifier(self): - assert ( - self.manager.extract_did_from_identifier( - "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0" - ) - == "WgWxqztrNooG92RXvxSTWv" - ) - assert ( - self.manager.extract_did_from_identifier( - "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag" - ) - == "WgWxqztrNooG92RXvxSTWv" - ) - assert ( - self.manager.extract_did_from_identifier("WgWxqztrNooG92RXvxSTWv") - == "WgWxqztrNooG92RXvxSTWv" - ) - assert ( - self.manager.extract_did_from_identifier("did:sov:WgWxqztrNooG92RXvxSTWv") - == "WgWxqztrNooG92RXvxSTWv" - ) - - async def test_get_production_ledgers(self): - assert len(await self.manager.get_prod_ledgers()) == 2 - - async def 
test_get_non_production_ledgers(self): - assert len(await self.manager.get_nonprod_ledgers()) == 2 diff --git a/aries_cloudagent/ledger/multiple_ledger/tests/test_manager_provider.py b/aries_cloudagent/ledger/multiple_ledger/tests/test_manager_provider.py index d6b8b70706..80091cfb01 100644 --- a/aries_cloudagent/ledger/multiple_ledger/tests/test_manager_provider.py +++ b/aries_cloudagent/ledger/multiple_ledger/tests/test_manager_provider.py @@ -1,15 +1,12 @@ import pytest -from unittest import mock from unittest import IsolatedAsyncioTestCase from ....askar.profile import AskarProfileManager from ....config.injection_context import InjectionContext from ....core.in_memory import InMemoryProfile -from ....indy.sdk.profile import IndySdkProfile -from ....indy.sdk.wallet_setup import IndyOpenWallet, IndyWalletConfig from ....ledger.base import BaseLedger -from ....ledger.indy import IndySdkLedgerPool, IndySdkLedger +from ....ledger.indy_vdr import IndyVdrLedger, IndyVdrLedgerPool from ..base_manager import MultipleLedgerManagerError from ..manager_provider import MultiIndyLedgerManagerProvider @@ -66,30 +63,6 @@ async def test_provide_invalid_manager(self): with self.assertRaises(MultipleLedgerManagerError): provider.provide(context.settings, context.injector) - @pytest.mark.indy - async def test_provide_indy_manager(self): - context = InjectionContext() - with mock.patch.object(IndySdkProfile, "_make_finalizer"): - profile = IndySdkProfile( - IndyOpenWallet( - config=IndyWalletConfig({"name": "test-profile"}), - created=True, - handle=1, - master_secret_id="master-secret", - ), - context, - ) - context.injector.bind_instance( - BaseLedger, IndySdkLedger(IndySdkLedgerPool("name"), profile) - ) - provider = MultiIndyLedgerManagerProvider(profile) - context.settings["ledger.ledger_config_list"] = LEDGER_CONFIG - context.settings["ledger.genesis_transactions"] = TEST_GENESIS_TXN - self.assertEqual( - provider.provide(context.settings, context.injector).__class__.__name__, - "MultiIndyLedgerManager", - ) - @pytest.mark.askar async def test_provide_askar_manager(self): context = InjectionContext() @@ -104,7 +77,7 @@ async def test_provide_askar_manager(self): }, ) context.injector.bind_instance( - BaseLedger, IndySdkLedger(IndySdkLedgerPool("name"), profile) + BaseLedger, IndyVdrLedger(IndyVdrLedgerPool("name"), profile) ) provider = MultiIndyLedgerManagerProvider(profile) context.settings["ledger.ledger_config_list"] = LEDGER_CONFIG diff --git a/aries_cloudagent/ledger/routes.py b/aries_cloudagent/ledger/routes.py index 4d1f270afe..8fbf8ee102 100644 --- a/aries_cloudagent/ledger/routes.py +++ b/aries_cloudagent/ledger/routes.py @@ -11,9 +11,9 @@ request_schema, response_schema, ) - from marshmallow import fields, validate +from ..admin.decorators.auth import tenant_authentication from ..admin.request_context import AdminRequestContext from ..connections.models.conn_record import ConnRecord from ..messaging.models.base import BaseModelError @@ -262,6 +262,7 @@ class WriteLedgerRequestSchema(OpenAPISchema): @querystring_schema(CreateDidTxnForEndorserOptionSchema()) @querystring_schema(SchemaConnIdMatchInfoSchema()) @response_schema(TxnOrRegisterLedgerNymResponseSchema(), 200, description="") +@tenant_authentication async def register_ledger_nym(request: web.BaseRequest): """Request handler for registering a NYM with the ledger. 
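The routes.py hunks above and below attach the new @tenant_authentication decorator to every ledger admin handler, so multitenant deployments can no longer hit these endpoints anonymously. The decorator's real implementation lives in aries_cloudagent/admin/decorators/auth.py and is not part of this diff; what follows is only a minimal sketch of the general shape such a guard can take, where the request["context"] lookup and the bearer-header check are assumptions rather than the project's actual logic. A client-side usage example follows the set_write_ledger hunk below.

import functools

from aiohttp import web


def tenant_authentication_sketch(handler):
    """Illustrative only: require a bearer token on multitenant admin calls."""

    @functools.wraps(handler)
    async def wrapper(request: web.BaseRequest):
        # Assumption: admin-server middleware stores the request context under
        # request["context"], as the handlers in this module expect.
        context = request["context"]
        multitenant = context.settings.get("multitenant.enabled")
        auth_header = request.headers.get("Authorization", "")
        if multitenant and not auth_header.startswith("Bearer "):
            raise web.HTTPUnauthorized(reason="Missing or invalid bearer token")
        return await handler(request)

    return wrapper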
@@ -425,6 +426,7 @@ async def register_ledger_nym(request: web.BaseRequest):
 )
 @querystring_schema(QueryStringDIDSchema)
 @response_schema(GetNymRoleResponseSchema(), 200, description="")
+@tenant_authentication
 async def get_nym_role(request: web.BaseRequest):
     """Request handler for getting the role from the NYM registration of a public DID.

@@ -471,6 +473,7 @@ async def get_nym_role(request: web.BaseRequest):

 @docs(tags=["ledger"], summary="Rotate key pair for public DID.")
 @response_schema(LedgerModulesResultSchema(), 200, description="")
+@tenant_authentication
 async def rotate_public_did_keypair(request: web.BaseRequest):
     """Request handler for rotating key pair associated with public DID.

@@ -500,6 +503,7 @@ async def rotate_public_did_keypair(request: web.BaseRequest):
 )
 @querystring_schema(QueryStringDIDSchema())
 @response_schema(GetDIDVerkeyResponseSchema(), 200, description="")
+@tenant_authentication
 async def get_did_verkey(request: web.BaseRequest):
     """Request handler for getting a verkey for a DID from the ledger.

@@ -548,6 +552,7 @@ async def get_did_verkey(request: web.BaseRequest):
 )
 @querystring_schema(QueryStringEndpointSchema())
 @response_schema(GetDIDEndpointResponseSchema(), 200, description="")
+@tenant_authentication
 async def get_did_endpoint(request: web.BaseRequest):
     """Request handler for getting a verkey for a DID from the ledger.

@@ -593,6 +598,7 @@ async def get_did_endpoint(request: web.BaseRequest):

 @docs(tags=["ledger"], summary="Fetch the current transaction author agreement, if any")
 @response_schema(TAAResultSchema, 200, description="")
+@tenant_authentication
 async def ledger_get_taa(request: web.BaseRequest):
     """Request handler for fetching the transaction author agreement.

@@ -633,6 +639,7 @@ async def ledger_get_taa(request: web.BaseRequest):
 @docs(tags=["ledger"], summary="Accept the transaction author agreement")
 @request_schema(TAAAcceptSchema)
 @response_schema(LedgerModulesResultSchema(), 200, description="")
+@tenant_authentication
 async def ledger_accept_taa(request: web.BaseRequest):
     """Request handler for accepting the current transaction author agreement.

@@ -693,6 +700,7 @@ async def ledger_accept_taa(request: web.BaseRequest):

 @docs(tags=["ledger"], summary="Fetch list of available write ledgers")
 @response_schema(ConfigurableWriteLedgersSchema, 200, description="")
+@tenant_authentication
 async def get_write_ledgers(request: web.BaseRequest):
     """Request handler for fetching the list of available write ledgers.

@@ -714,6 +722,7 @@ async def get_write_ledgers(request: web.BaseRequest):

 @docs(tags=["ledger"], summary="Fetch the current write ledger")
 @response_schema(WriteLedgerSchema, 200, description="")
+@tenant_authentication
 async def get_write_ledger(request: web.BaseRequest):
     """Request handler for fetching the currently set write ledger.

@@ -739,6 +748,7 @@ async def get_write_ledger(request: web.BaseRequest):
 @docs(tags=["ledger"], summary="Set write ledger")
 @match_info_schema(WriteLedgerRequestSchema())
 @response_schema(WriteLedgerSchema, 200, description="")
+@tenant_authentication
 async def set_write_ledger(request: web.BaseRequest):
     """Request handler for setting write ledger.

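With the decorators in place, a tenant must send its wallet-scoped JWT on every ledger admin call. A quick client-side illustration, assuming a standard ACA-Py admin server on localhost:8031 and the /ledger/taa endpoint guarded above (the token value is hypothetical):

import asyncio

import aiohttp


async def fetch_taa(token: str) -> dict:
    # The token is the JWT issued for the tenant's subwallet
    # (e.g. via the multitenancy admin API).
    headers = {"Authorization": f"Bearer {token}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.get("http://localhost:8031/ledger/taa") as resp:
            resp.raise_for_status()  # a 401 here means the guard rejected us
            return await resp.json()


# asyncio.run(fetch_taa("eyJhbGciOiJIUzI1NiJ9..."))  # hypothetical token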
@@ -769,6 +779,7 @@ async def set_write_ledger(request: web.BaseRequest): tags=["ledger"], summary="Fetch the multiple ledger configuration currently in use" ) @response_schema(LedgerConfigListSchema, 200, description="") +@tenant_authentication async def get_ledger_config(request: web.BaseRequest): """Request handler for fetching the ledger configuration list in use. diff --git a/aries_cloudagent/ledger/tests/test_indy.py b/aries_cloudagent/ledger/tests/test_indy.py deleted file mode 100644 index 9b702d98bd..0000000000 --- a/aries_cloudagent/ledger/tests/test_indy.py +++ /dev/null @@ -1,3498 +0,0 @@ -import asyncio -import json -import tempfile -import pytest - -from os import path - -from aries_cloudagent.tests import mock -from unittest import IsolatedAsyncioTestCase - -from ...config.injection_context import InjectionContext -from ...cache.in_memory import InMemoryCache -from ...indy.issuer import IndyIssuer, IndyIssuerError -from ...indy.sdk.profile import IndySdkProfile -from ...storage.record import StorageRecord -from ...wallet.base import BaseWallet -from ...wallet.did_info import DIDInfo -from ...wallet.did_posture import DIDPosture -from ...wallet.error import WalletNotFoundError -from ...wallet.indy import IndySdkWallet -from ...wallet.key_type import ED25519 -from ...wallet.did_method import SOV - -from ..endpoint_type import EndpointType -from ..indy import ( - BadLedgerRequestError, - ClosedPoolError, - ErrorCode, - IndyErrorHandler, - IndyError, - IndySdkLedger, - IndySdkLedgerPool, - IndySdkLedgerPoolProvider, - GENESIS_TRANSACTION_FILE, - LedgerConfigError, - LedgerError, - LedgerTransactionError, - Role, - TAA_ACCEPTED_RECORD_TYPE, -) - - -GENESIS_TRANSACTION_PATH = path.join( - tempfile.gettempdir(), f"name_{GENESIS_TRANSACTION_FILE}" -) - - -@pytest.mark.indy -class TestIndySdkLedgerPoolProvider(IsolatedAsyncioTestCase): - async def test_provide(self): - provider = IndySdkLedgerPoolProvider() - mock_injector = mock.MagicMock(inject=mock.MagicMock(return_value=None)) - provider.provide( - settings={ - "ledger.read_only": True, - "ledger.genesis_transactions": "genesis-txns", - }, - injector=mock_injector, - ) - - -@pytest.mark.indy -class TestIndySdkLedger(IsolatedAsyncioTestCase): - async def asyncSetUp(self): - self.test_did = "55GkHamhTU1ZbTbV2ab9DE" - self.test_did_info = DIDInfo( - did=self.test_did, - verkey="3Dn1SJNPaCXcvvJvSbsFWP2xaCjMom3can8CQNhWrTRx", - metadata={"test": "test"}, - method=SOV, - key_type=ED25519, - ) - self.test_verkey = "3Dn1SJNPaCXcvvJvSbsFWP2xaCjMom3can8CQNhWrTRx" - context = InjectionContext() - context.injector.bind_instance(IndySdkLedgerPool, IndySdkLedgerPool("name")) - with mock.patch.object(IndySdkProfile, "_make_finalizer"): - self.profile = IndySdkProfile( - mock.CoroutineMock(), - context, - ) - self.session = await self.profile.session() - - @mock.patch("indy.pool.create_pool_ledger_config") - @mock.patch("indy.pool.list_pools") - @mock.patch("indy.pool.open_pool_ledger") - @mock.patch("builtins.open") - async def test_init( - self, mock_open, mock_open_ledger, mock_list_pools, mock_create_config - ): - mock_open.return_value = mock.MagicMock() - mock_list_pools.return_value = [] - - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - ledger = IndySdkLedger( - IndySdkLedgerPool("name", genesis_transactions="genesis_transactions"), - self.profile, - ) - - assert ledger.pool_name == "name" - assert not ledger.read_only - assert ledger.backend - assert ledger.profile is self.profile 
-
-        await ledger.__aenter__()
-
-        mock_open.assert_called_once_with(GENESIS_TRANSACTION_PATH, "w")
-        mock_open.return_value.__enter__.return_value.write.assert_called_once_with(
-            "genesis_transactions"
-        )
-        mock_create_config.assert_called_once_with(
-            "name", json.dumps({"genesis_txn": GENESIS_TRANSACTION_PATH})
-        )
-        assert ledger.did_to_nym(ledger.nym_to_did(self.test_did)) == self.test_did
-
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.list_pools")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("builtins.open")
-    async def test_init_not_checked(
-        self, mock_open, mock_open_ledger, mock_list_pools, mock_create_config
-    ):
-        mock_open.return_value = mock.MagicMock()
-        mock_list_pools.return_value = []
-
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name"), self.profile)
-
-        assert ledger.pool_name == "name"
-        assert ledger.backend
-        assert ledger.profile is self.profile
-
-        with self.assertRaises(LedgerError):
-            await ledger.__aenter__()
-
-        mock_list_pools.return_value = [{"pool": ledger.pool_name}]
-        await ledger.__aenter__()
-
-    @mock.patch("indy.pool.list_pools")
-    @mock.patch("builtins.open")
-    async def test_init_do_not_recreate(self, mock_open, mock_list_pools):
-        mock_open.return_value = mock.MagicMock()
-        mock_list_pools.return_value = [{"pool": "name"}, {"pool": "another"}]
-
-        pool = IndySdkLedgerPool("name")
-        assert pool.name == "name"
-
-        with self.assertRaises(LedgerConfigError):
-            await pool.create_pool_config("genesis_transactions", recreate=False)
-
-        mock_open.assert_called_once_with(GENESIS_TRANSACTION_PATH, "w")
-
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.delete_pool_ledger_config")
-    @mock.patch("indy.pool.list_pools")
-    @mock.patch("builtins.open")
-    async def test_init_recreate(
-        self, mock_open, mock_list_pools, mock_delete_config, mock_create_config
-    ):
-        mock_open.return_value = mock.MagicMock()
-        mock_list_pools.return_value = [{"pool": "name"}, {"pool": "another"}]
-        mock_delete_config.return_value = None
-
-        pool = IndySdkLedgerPool("name")
-        assert pool.name == "name"
-
-        await pool.create_pool_config("genesis_transactions", recreate=True)
-
-        mock_open.assert_called_once_with(GENESIS_TRANSACTION_PATH, "w")
-        mock_delete_config.assert_called_once_with("name")
-        mock_create_config.assert_called_once_with(
-            "name", json.dumps({"genesis_txn": GENESIS_TRANSACTION_PATH})
-        )
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    async def test_aenter_aexit(
-        self, mock_close_pool, mock_open_ledger, mock_set_proto
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            async with ledger as led:
-                mock_set_proto.assert_called_once_with(2)
-                mock_open_ledger.assert_called_once_with("name", "{}")
-                assert led == ledger
-                mock_close_pool.assert_not_called()
-                assert led.pool_handle == mock_open_ledger.return_value
-
-        mock_close_pool.assert_called_once()
-        assert ledger.pool_handle is None
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    async def test_aenter_aexit_nested_keepalive(
-        self, mock_close_pool, mock_open_ledger, mock_set_proto
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        ledger = IndySdkLedger(
-            IndySdkLedgerPool("name", checked=True, keepalive=1), self.profile
-        )
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            async with ledger as led0:
-                mock_set_proto.assert_called_once_with(2)
-                mock_open_ledger.assert_called_once_with("name", "{}")
-                assert led0 == ledger
-                mock_close_pool.assert_not_called()
-                assert led0.pool_handle == mock_open_ledger.return_value
-
-            async with ledger as led1:
-                assert ledger.pool.ref_count == 1
-
-            mock_close_pool.assert_not_called()  # it's a future
-            assert ledger.pool_handle
-
-            await asyncio.sleep(1.01)
-            mock_close_pool.assert_called_once()
-            assert ledger.pool_handle is None
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    async def test_aenter_aexit_close_x(
-        self, mock_close_pool, mock_open_ledger, mock_set_proto
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_close_pool.side_effect = IndyError(ErrorCode.PoolLedgerTimeout)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            with self.assertRaises(LedgerError):
-                async with ledger as led:
-                    assert led.pool_handle == mock_open_ledger.return_value
-
-            assert ledger.pool_handle == mock_open_ledger.return_value
-            assert ledger.pool.ref_count == 1
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    async def test_submit_pool_closed(
-        self, mock_close_pool, mock_open_ledger, mock_create_config, mock_set_proto
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            with self.assertRaises(ClosedPoolError) as context:
-                await ledger._submit("{}")
-            assert "sign and submit request to closed pool" in str(context.exception)
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    @mock.patch("indy.ledger.sign_and_submit_request")
-    @mock.patch("indy.ledger.multi_sign_request")
-    async def test_submit_signed(
-        self,
-        mock_indy_multi_sign,
-        mock_sign_submit,
-        mock_close_pool,
-        mock_open_ledger,
-        mock_create_config,
-        mock_set_proto,
-    ):
-        mock_indy_multi_sign.return_value = json.dumps({"endorsed": "content"})
-        mock_sign_submit.return_value = '{"op": "REPLY"}'
-
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            async with ledger:
-                mock_wallet_get_public_did.return_value = None
-
-                with self.assertRaises(BadLedgerRequestError):
-                    await ledger._submit("{}", True)
-
-                mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-                mock_did = mock_wallet_get_public_did.return_value
-                mock_did.did = self.test_did
-
-                await ledger._submit(
-                    request_json="{}",
-                    sign=True,
-                    taa_accept=False,
-                )
-
-                result_json = await ledger._submit(  # multi-sign for later endorsement
-                    request_json="{}",
-                    sign=True,
-                    taa_accept=False,
-                    write_ledger=False,
-                )
-                assert json.loads(result_json) == {"endorsed": "content"}
-
-                await ledger.txn_submit(  # cover txn_submit()
-                    request_json="{}",
-                    sign=True,
-                    taa_accept=False,
-                )
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    @mock.patch("indy.ledger.sign_and_submit_request")
-    @mock.patch("indy.ledger.append_txn_author_agreement_acceptance_to_request")
-    async def test_submit_signed_taa_accept(
-        self,
-        mock_append_taa,
-        mock_sign_submit,
-        mock_close_pool,
-        mock_open_ledger,
-        mock_create_config,
-        mock_set_proto,
-    ):
-        mock_append_taa.return_value = "{}"
-        mock_sign_submit.return_value = '{"op": "REPLY"}'
-
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            ledger = IndySdkLedger(
-                IndySdkLedgerPool("name", checked=True), self.profile
-            )
-            ledger.get_latest_txn_author_acceptance = mock.CoroutineMock(
-                return_value={
-                    "text": "sample",
-                    "version": "0.0",
-                    "digest": "digest",
-                    "mechanism": "dummy",
-                    "time": "now",
-                }
-            )
-
-            async with ledger:
-                mock_did = mock_wallet_get_public_did.return_value
-                mock_did.did = self.test_did
-
-                await ledger._submit(
-                    request_json="{}",
-                    sign=None,
-                    taa_accept=True,
-                    sign_did=self.test_did_info,
-                )
-                mock_append_taa.assert_called_once_with(
-                    "{}", "sample", "0.0", "digest", "dummy", "now"
-                )
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    @mock.patch("indy.ledger.submit_request")
-    async def test_submit_unsigned(
-        self,
-        mock_submit,
-        mock_close_pool,
-        mock_open_ledger,
-        mock_create_config,
-        mock_set_proto,
-    ):
-        mock_did = mock.MagicMock()
-
-        future = asyncio.Future()
-        future.set_result(mock_did)
-
-        mock_submit.return_value = '{"op": "REPLY"}'
-
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = future
-            async with ledger:
-                await ledger._submit("{}", False)
-                mock_submit.assert_called_once_with(ledger.pool_handle, "{}")
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    @mock.patch("indy.ledger.submit_request")
-    async def test_submit_unsigned_ledger_transaction_error(
-        self,
-        mock_submit,
-        mock_close_pool,
-        mock_open_ledger,
-        mock_create_config,
-        mock_set_proto,
-    ):
-        mock_did = mock.MagicMock()
-
-        future = asyncio.Future()
-        future.set_result(mock_did)
-
-        mock_submit.return_value = '{"op": "NO-SUCH-OP"}'
-
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = future
-            ledger = IndySdkLedger(
-                IndySdkLedgerPool("name", checked=True), self.profile
-            )
-            async with ledger:
-                with self.assertRaises(LedgerTransactionError):
-                    await ledger._submit("{}", False)
-                mock_submit.assert_called_once_with(ledger.pool_handle, "{}")
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    @mock.patch("indy.ledger.submit_request")
-    async def test_submit_rejected(
-        self,
-        mock_submit,
-        mock_close_pool,
-        mock_open_ledger,
-        mock_create_config,
-        mock_set_proto,
-    ):
-        mock_did = mock.MagicMock()
-
-        future = asyncio.Future()
-        future.set_result(mock_did)
-
-        mock_submit.return_value = '{"op": "REQNACK", "reason": "a reason"}'
-
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = future
-            async with ledger:
-                with self.assertRaises(LedgerTransactionError) as context:
-                    await ledger._submit("{}", False)
-                assert "Ledger rejected transaction request" in str(context.exception)
-
-        mock_submit.return_value = '{"op": "REJECT", "reason": "another reason"}'
-
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = future
-            async with ledger:
-                with self.assertRaises(LedgerTransactionError) as context:
-                    await ledger._submit("{}", False)
-                assert "Ledger rejected transaction request" in str(context.exception)
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    @mock.patch("indy.ledger.multi_sign_request")
-    async def test_txn_endorse(
-        self,
-        mock_indy_multi_sign,
-        mock_indy_close,
-        mock_indy_open,
-        mock_create_config,
-        mock_set_proto,
-    ):
-        mock_indy_multi_sign.return_value = json.dumps({"endorsed": "content"})
-        mock_indy_open.return_value = 1
-
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = None
-            with self.assertRaises(ClosedPoolError):
-                await ledger.txn_endorse(request_json=json.dumps({"...": "..."}))
-
-            async with ledger:
-                with self.assertRaises(BadLedgerRequestError):
-                    await ledger.txn_endorse(request_json=json.dumps({"...": "..."}))
-
-                mock_wallet_get_public_did.return_value = self.test_did_info
-
-                endorsed_json = await ledger.txn_endorse(
-                    request_json=json.dumps({"...": "..."})
-                )
-                assert json.loads(endorsed_json) == {"endorsed": "content"}
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.fetch_schema_by_id")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.fetch_schema_by_seq_no")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.add_record")
-    @mock.patch("indy.ledger.build_schema_request")
-    @mock.patch("indy.ledger.append_request_endorser")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only")
-    async def test_send_schema(
-        self,
-        mock_is_ledger_read_only,
-        mock_append_request_endorser,
-        mock_build_schema_req,
-        mock_add_record,
-        mock_fetch_schema_by_seq_no,
-        mock_fetch_schema_by_id,
-        mock_submit,
-        mock_close,
-        mock_open,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_is_ledger_read_only.return_value = False
-
-        issuer = mock.MagicMock(IndyIssuer)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        issuer.create_schema.return_value = ("schema_issuer_did:name:1.0", "{}")
-        mock_fetch_schema_by_id.return_value = None
-        mock_fetch_schema_by_seq_no.return_value = None
-
-        mock_submit.return_value = (
-            r'{"op":"REPLY","result":{"txnMetadata":{"seqNo": 1}}}'
-        )
-        future = asyncio.Future()
-        future.set_result(mock.MagicMock(add_record=mock.CoroutineMock()))
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did, mock.patch.object(
-            ledger, "get_indy_storage", mock.MagicMock()
-        ) as mock_get_storage:
-            mock_get_storage.return_value = future
-            async with ledger:
-                mock_wallet_get_public_did.return_value = None
-
-                with self.assertRaises(BadLedgerRequestError):
-                    schema_id, schema_def = await ledger.create_and_send_schema(
-                        issuer, "schema_name", "schema_version", [1, 2, 3]
-                    )
-
-                mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-                mock_did = mock_wallet_get_public_did.return_value
-                mock_did.did = self.test_did
-
-                schema_id, schema_def = await ledger.create_and_send_schema(
-                    issuer, "schema_name", "schema_version", [1, 2, 3]
-                )
-                issuer.create_schema.assert_called_once_with(
-                    mock_did.did, "schema_name", "schema_version", [1, 2, 3]
-                )
-
-                mock_build_schema_req.assert_called_once_with(
-                    mock_did.did, issuer.create_schema.return_value[1]
-                )
-
-                mock_submit.assert_called_once_with(
-                    mock_build_schema_req.return_value,
-                    sign=True,
-                    sign_did=mock_wallet_get_public_did.return_value,
-                    taa_accept=None,
-                    write_ledger=True,
-                )
-
-                assert schema_id == issuer.create_schema.return_value[0]
-
-                schema_id, signed_txn = await ledger.create_and_send_schema(
-                    issuer=issuer,
-                    schema_name="schema_name",
-                    schema_version="schema_version",
-                    attribute_names=[1, 2, 3],
-                    write_ledger=False,
-                    endorser_did=self.test_did,
-                )
-                assert schema_id == issuer.create_schema.return_value[0]
-                assert "signed_txn" in signed_txn
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.check_existing_schema")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.add_record")
-    @mock.patch("indy.ledger.build_schema_request")
-    async def test_send_schema_already_exists(
-        self,
-        mock_build_schema_req,
-        mock_add_record,
-        mock_check_existing,
-        mock_close_pool,
-        mock_open_ledger,
-        mock_create_config,
-        mock_set_proto,
-    ):
-        # mock_did = mock.CoroutineMock()
-
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.create_schema.return_value = ("1", "{}")
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        mock_add_record = mock.CoroutineMock()
-        future = asyncio.Future()
-        future.set_result(
-            mock.MagicMock(return_value=mock.MagicMock(add_record=mock_add_record))
-        )
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did, mock.patch.object(
-            ledger, "get_indy_storage", mock.MagicMock()
-        ) as mock_get_storage:
-            mock_get_storage.return_value = future
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            fetch_schema_id = (
-                f"{mock_wallet_get_public_did.return_value.did}:2:"
-                "schema_name:schema_version"
-            )
-            mock_check_existing.return_value = (fetch_schema_id, {})
-
-            async with ledger:
-                schema_id, schema_def = await ledger.create_and_send_schema(
-                    issuer, "schema_name", "schema_version", [1, 2, 3]
-                )
-                assert schema_id == fetch_schema_id
-                assert schema_def == {}
-
-                mock_add_record.assert_not_called()
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.check_existing_schema")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.add_record")
-    @mock.patch("indy.ledger.build_schema_request")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only")
-    async def test_send_schema_ledger_transaction_error_already_exists(
-        self,
-        mock_is_ledger_read_only,
-        mock_build_schema_req,
-        mock_add_record,
-        mock_check_existing,
-        mock_close_pool,
-        mock_open_ledger,
-        mock_create_config,
-        mock_set_proto,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_is_ledger_read_only.return_value = False
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.create_schema.return_value = ("1", "{}")
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        ledger._submit = mock.CoroutineMock(
-            side_effect=LedgerTransactionError("UnauthorizedClientRequest")
-        )
-        future = asyncio.Future()
-        future.set_result(mock.MagicMock(add_record=mock.CoroutineMock()))
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did, mock.patch.object(
-            ledger, "get_indy_storage", mock.MagicMock()
-        ) as mock_get_storage:
-            mock_get_storage.return_value = future
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            fetch_schema_id = (
-                f"{mock_wallet_get_public_did.return_value.did}:2:"
-                "schema_name:schema_version"
-            )
-            mock_check_existing.side_effect = [None, (fetch_schema_id, "{}")]
-            async with ledger:
-                schema_id, schema_def = await ledger.create_and_send_schema(
-                    issuer, "schema_name", "schema_version", [1, 2, 3]
-                )
-                assert schema_id == fetch_schema_id
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.check_existing_schema")
-    async def test_send_schema_ledger_read_only(
-        self,
-        mock_check_existing,
-        mock_close_pool,
-        mock_open_ledger,
-        mock_create_config,
-        mock_set_proto,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.create_schema.return_value = ("1", "{}")
-        ledger = IndySdkLedger(
-            IndySdkLedgerPool("name", checked=True, read_only=True), self.profile
-        )
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            fetch_schema_id = (
-                f"{mock_wallet_get_public_did.return_value.did}:2:"
-                "schema_name:schema_version"
-            )
-            mock_check_existing.side_effect = [None, fetch_schema_id]
-            async with ledger:
-                with self.assertRaises(LedgerError) as context:
-                    await ledger.create_and_send_schema(
-                        issuer, "schema_name", "schema_version", [1, 2, 3]
-                    )
-                assert "read only" in str(context.exception)
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.check_existing_schema")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only")
-    async def test_send_schema_issuer_error(
-        self,
-        mock_is_ledger_read_only,
-        mock_check_existing,
-        mock_close_pool,
-        mock_open_ledger,
-        mock_create_config,
-        mock_set_proto,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_is_ledger_read_only.return_value = False
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.create_schema = mock.CoroutineMock(
-            side_effect=IndyIssuerError("dummy error")
-        )
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            fetch_schema_id = (
-                f"{mock_wallet_get_public_did.return_value.did}:2:"
-                "schema_name:schema_version"
-            )
-            mock_check_existing.side_effect = [None, fetch_schema_id]
-            async with ledger:
-                with self.assertRaises(LedgerError) as context:
-                    await ledger.create_and_send_schema(
-                        issuer, "schema_name", "schema_version", [1, 2, 3]
-                    )
-                assert "dummy error" in str(context.exception)
-
-    @mock.patch("indy.pool.set_protocol_version")
-    @mock.patch("indy.pool.create_pool_ledger_config")
-    @mock.patch("indy.pool.open_pool_ledger")
-    @mock.patch("indy.pool.close_pool_ledger")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.check_existing_schema")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.add_record")
-    @mock.patch("indy.ledger.build_schema_request")
-    async def test_send_schema_ledger_transaction_error(
-        self,
-        mock_build_schema_req,
-        mock_add_record,
-        mock_check_existing,
-        mock_close_pool,
-        mock_open_ledger,
-        mock_create_config,
-        mock_set_proto,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.create_schema.return_value = ("1", "{}")
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        ledger._submit = mock.CoroutineMock(
-            side_effect=LedgerTransactionError("Some other error message")
-        )
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            fetch_schema_id = (
-                f"{mock_wallet_get_public_did.return_value.did}:2:"
-                "schema_name:schema_version"
-            )
-            mock_check_existing.side_effect = [None, fetch_schema_id]
-            async with ledger:
-                with self.assertRaises(LedgerTransactionError):
-                    await ledger.create_and_send_schema(
-                        issuer, "schema_name", "schema_version", [1, 2, 3]
-                    )
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.fetch_schema_by_id")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.fetch_schema_by_seq_no")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.add_record")
-    @mock.patch("indy.ledger.build_schema_request")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only")
-    async def test_send_schema_no_seq_no(
-        self,
-        mock_is_ledger_read_only,
-        mock_build_schema_req,
-        mock_add_record,
-        mock_fetch_schema_by_seq_no,
-        mock_fetch_schema_by_id,
-        mock_submit,
-        mock_close,
-        mock_open,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        issuer = mock.MagicMock(IndyIssuer)
-        mock_is_ledger_read_only.return_value = False
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        issuer.create_schema.return_value = ("schema_issuer_did:name:1.0", "{}")
-        mock_fetch_schema_by_id.return_value = None
-        mock_fetch_schema_by_seq_no.return_value = None
-
-        mock_submit.return_value = (
-            r'{"op":"REPLY","result":{"txnMetadata":{"no": "seqNo"}}}'
-        )
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            async with ledger:
-                mock_wallet.get_public_did = mock.CoroutineMock()
-                mock_did = mock_wallet_get_public_did.return_value
-                mock_did.did = self.test_did
-
-                with self.assertRaises(LedgerError) as context:
-                    await ledger.create_and_send_schema(
-                        issuer, "schema_name", "schema_version", [1, 2, 3]
-                    )
-                assert "schema sequence number" in str(context.exception)
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.fetch_schema_by_id")
-    async def test_check_existing_schema(
-        self,
-        mock_fetch_schema_by_id,
-        mock_close,
-        mock_open,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-
-        mock_fetch_schema_by_id.return_value = {"attrNames": ["a", "b", "c"]}
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            mock_did = mock_wallet_get_public_did.return_value
-            mock_did.did = self.test_did
-            async with ledger:
-                schema_id, schema_def = await ledger.check_existing_schema(
-                    public_did=self.test_did,
-                    schema_name="test",
-                    schema_version="1.0",
-                    attribute_names=["c", "b", "a"],
-                )
-                assert schema_id == f"{self.test_did}:2:test:1.0"
-
-                with self.assertRaises(LedgerTransactionError):
-                    await ledger.check_existing_schema(
-                        public_did=self.test_did,
-                        schema_name="test",
-                        schema_version="1.0",
-                        attribute_names=["a", "b", "c", "d"],
-                    )
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("indy.ledger.build_get_schema_request")
-    @mock.patch("indy.ledger.parse_get_schema_response")
-    async def test_get_schema(
-        self,
-        mock_parse_get_schema_resp,
-        mock_build_get_schema_req,
-        mock_submit,
-        mock_close,
-        mock_open,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-
-        mock_parse_get_schema_resp.return_value = (None, '{"attrNames": ["a", "b"]}')
-
-        mock_submit.return_value = '{"result":{"seqNo":1}}'
-
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            mock_did = mock_wallet_get_public_did.return_value
-            mock_did.did = self.test_did
-            ledger = IndySdkLedger(
-                IndySdkLedgerPool("name", checked=True, cache=InMemoryCache()),
-                self.profile,
-            )
-            async with ledger:
-                response = await ledger.get_schema("schema_id")
-                mock_wallet_get_public_did.assert_called_once_with()
-                mock_build_get_schema_req.assert_called_once_with(
-                    mock_did.did, "schema_id"
-                )
-                mock_submit.assert_called_once_with(
-                    mock_build_get_schema_req.return_value, sign_did=mock_did
-                )
-                mock_parse_get_schema_resp.assert_called_once_with(
-                    mock_submit.return_value
-                )
-
-                assert response == json.loads(
-                    mock_parse_get_schema_resp.return_value[1]
-                )
-
-                response == await ledger.get_schema("schema_id")  # cover get-from-cache
-                assert response == json.loads(
-                    mock_parse_get_schema_resp.return_value[1]
-                )
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("indy.ledger.build_get_schema_request")
-    async def test_get_schema_not_found(
-        self,
-        mock_build_get_schema_req,
-        mock_submit,
-        mock_close,
-        mock_open,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-
-        mock_submit.return_value = json.dumps({"result": {"seqNo": None}})
-
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            mock_did = mock_wallet_get_public_did.return_value
-            mock_did.did = self.test_did
-            ledger = IndySdkLedger(
-                IndySdkLedgerPool("name", checked=True, cache=InMemoryCache()),
-                self.profile,
-            )
-
-            async with ledger:
-                response = await ledger.get_schema("schema_id")
-                mock_wallet_get_public_did.assert_called_once_with()
-                mock_build_get_schema_req.assert_called_once_with(
-                    mock_did.did, "schema_id"
-                )
-                mock_submit.assert_called_once_with(
-                    mock_build_get_schema_req.return_value, sign_did=mock_did
-                )
-
-                assert response is None
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("indy.ledger.build_get_txn_request")
-    @mock.patch("indy.ledger.build_get_schema_request")
-    @mock.patch("indy.ledger.parse_get_schema_response")
-    async def test_get_schema_by_seq_no(
-        self,
-        mock_parse_get_schema_resp,
-        mock_build_get_schema_req,
-        mock_build_get_txn_req,
-        mock_submit,
-        mock_close,
-        mock_open,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-
-        mock_parse_get_schema_resp.return_value = (None, '{"attrNames": ["a", "b"]}')
-
-        submissions = [
-            json.dumps(
-                {
-                    "result": {
-                        "data": {
-                            "txn": {
-                                "type": "101",
-                                "metadata": {"from": self.test_did},
-                                "data": {
-                                    "data": {"name": "preferences", "version": "1.0"}
-                                },
-                            }
-                        }
-                    }
-                }
-            ),
-            json.dumps({"result": {"seqNo": 999}}),
-        ]  # need to subscript these in assertions later
-        mock_submit.side_effect = list(
-            submissions
-        )  # becomes list iterator, unsubscriptable, in mock object
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            mock_did = mock_wallet_get_public_did.return_value
-            mock_did.did = self.test_did
-            ledger = IndySdkLedger(
-                IndySdkLedgerPool("name", checked=True), self.profile
-            )
-            async with ledger:
-                response = await ledger.get_schema("999")
-                mock_wallet_get_public_did.assert_called_once_with()
-                mock_build_get_txn_req.assert_called_once_with(None, None, seq_no=999)
-                mock_build_get_schema_req.assert_called_once_with(
-                    mock_did.did, f"{self.test_did}:2:preferences:1.0"
-                )
-                mock_submit.assert_has_calls(
-                    [
-                        mock.call(mock_build_get_txn_req.return_value),
-                        mock.call(
-                            mock_build_get_schema_req.return_value, sign_did=mock_did
-                        ),
-                    ]
-                )
-                mock_parse_get_schema_resp.assert_called_once_with(submissions[1])
-
-                assert response == json.loads(
-                    mock_parse_get_schema_resp.return_value[1]
-                )
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("indy.ledger.build_get_txn_request")
-    @mock.patch("indy.ledger.build_get_schema_request")
-    @mock.patch("indy.ledger.parse_get_schema_response")
-    async def test_get_schema_by_wrong_seq_no(
-        self,
-        mock_parse_get_schema_resp,
-        mock_build_get_schema_req,
-        mock_build_get_txn_req,
-        mock_submit,
-        mock_close,
-        mock_open,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-
-        mock_parse_get_schema_resp.return_value = (None, '{"attrNames": ["a", "b"]}')
-
-        submissions = [
-            json.dumps(
-                {
-                    "result": {
-                        "data": {
-                            "txn": {
-                                "type": "102",
-                            }
-                        }
-                    }
-                }
-            ),  # not a schema
-            json.dumps({"result": {"seqNo": 999}}),
-        ]  # need to subscript these in assertions later
-        mock_submit.side_effect = list(
-            submissions
-        )  # becomes list iterator, unsubscriptable, in mock object
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            mock_did = mock_wallet_get_public_did.return_value
-            mock_did.did = self.test_did
-            async with ledger:
-                with self.assertRaises(LedgerTransactionError):
-                    await ledger.get_schema("999")
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.get_schema")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch(
-        "aries_cloudagent.ledger.indy.IndySdkLedger.fetch_credential_definition"
-    )
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.find_all_records")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.add_record")
-    @mock.patch("indy.ledger.build_cred_def_request")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only")
-    async def test_send_credential_definition(
-        self,
-        mock_is_ledger_read_only,
-        mock_build_cred_def,
-        mock_add_record,
-        mock_find_all_records,
-        mock_submit,
-        mock_fetch_cred_def,
-        mock_close,
-        mock_open,
-        mock_get_schema,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_find_all_records.return_value = []
-        mock_is_ledger_read_only.return_value = False
-
-        mock_get_schema.return_value = {"seqNo": 999}
-        cred_def_id = f"{self.test_did}:3:CL:999:default"
-        cred_def_value = {
-            "primary": {"n": "...", "s": "...", "r": "...", "revocation": None}
-        }
-        cred_def = {
-            "ver": "1.0",
-            "id": cred_def_id,
-            "schemaId": "999",
-            "type": "CL",
-            "tag": "default",
-            "value": cred_def_value,
-        }
-        cred_def_json = json.dumps(cred_def)
-
-        mock_fetch_cred_def.side_effect = [None, cred_def]
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.make_credential_definition_id.return_value = cred_def_id
-        issuer.create_and_store_credential_definition.return_value = (
-            cred_def_id,
-            cred_def_json,
-        )
-        issuer.credential_definition_in_wallet.return_value = False
-
-        schema_id = "schema_issuer_did:name:1.0"
-        tag = "default"
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        future = asyncio.Future()
-        future.set_result(mock.MagicMock(add_record=mock.CoroutineMock()))
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did, mock.patch.object(
-            ledger, "get_indy_storage", mock.MagicMock()
-        ) as mock_get_storage:
-            mock_get_storage.return_value = future
-            async with ledger:
-                mock_wallet_get_public_did.return_value = None
-                with self.assertRaises(BadLedgerRequestError):
-                    await ledger.create_and_send_credential_definition(
-                        issuer, schema_id, None, tag
-                    )
-                mock_wallet_get_public_did.return_value = DIDInfo(
-                    did=self.test_did,
-                    verkey=self.test_verkey,
-                    metadata=None,
-                    method=SOV,
-                    key_type=ED25519,
-                )
-                mock_did = mock_wallet_get_public_did.return_value
-                (
-                    result_id,
-                    result_def,
-                    novel,
-                ) = await ledger.create_and_send_credential_definition(
-                    issuer, schema_id, None, tag
-                )
-                assert result_id == cred_def_id
-                assert novel
-                mock_get_schema.assert_called_once_with(schema_id)
-                mock_build_cred_def.assert_called_once_with(mock_did.did, cred_def_json)
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.get_schema")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch(
-        "aries_cloudagent.ledger.indy.IndySdkLedger.fetch_credential_definition"
-    )
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.find_all_records")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.add_record")
-    @mock.patch("indy.ledger.build_cred_def_request")
-    @mock.patch("indy.ledger.append_request_endorser")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only")
-    async def test_send_credential_definition_endorse_only(
-        self,
-        mock_is_ledger_read_only,
-        mock_append_request_endorser,
-        mock_build_cred_def,
-        mock_add_record,
-        mock_find_all_records,
-        mock_submit,
-        mock_fetch_cred_def,
-        mock_close,
-        mock_open,
-        mock_get_schema,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_find_all_records.return_value = []
-        mock_is_ledger_read_only.return_value = False
-
-        mock_get_schema.return_value = {"seqNo": 999}
-        cred_def_id = f"{self.test_did}:3:CL:999:default"
-        cred_def_value = {
-            "primary": {"n": "...", "s": "...", "r": "...", "revocation": None}
-        }
-        cred_def = {
-            "ver": "1.0",
-            "id": cred_def_id,
-            "schemaId": "999",
-            "type": "CL",
-            "tag": "default",
-            "value": cred_def_value,
-        }
-        cred_def_json = json.dumps(cred_def)
-
-        mock_fetch_cred_def.side_effect = [None, cred_def]
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.make_credential_definition_id.return_value = cred_def_id
-        issuer.create_and_store_credential_definition.return_value = (
-            cred_def_id,
-            cred_def_json,
-        )
-        issuer.credential_definition_in_wallet.return_value = False
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        schema_id = "schema_issuer_did:name:1.0"
-        tag = "default"
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = DIDInfo(
-                self.test_did,
-                self.test_verkey,
-                None,
-                SOV,
-                ED25519,
-            )
-            async with ledger:
-                (
-                    result_id,
-                    signed_txn,
-                    novel,
-                ) = await ledger.create_and_send_credential_definition(
-                    issuer=issuer,
-                    schema_id=schema_id,
-                    signature_type=None,
-                    tag=tag,
-                    support_revocation=False,
-                    write_ledger=False,
-                    endorser_did=self.test_did,
-                )
-                assert "signed_txn" in signed_txn
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.get_schema")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch(
-        "aries_cloudagent.ledger.indy.IndySdkLedger.fetch_credential_definition"
-    )
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.find_all_records")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.add_record")
-    @mock.patch("indy.ledger.build_cred_def_request")
-    async def test_send_credential_definition_exists_in_ledger_and_wallet(
-        self,
-        mock_build_cred_def,
-        mock_add_record,
-        mock_find_all_records,
-        mock_submit,
-        mock_fetch_cred_def,
-        mock_close,
-        mock_open,
-        mock_get_schema,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_find_all_records.return_value = []
-
-        mock_get_schema.return_value = {"seqNo": 999}
-        cred_def_id = f"{self.test_did}:3:CL:999:default"
-        cred_def_value = {
-            "primary": {"n": "...", "s": "...", "r": "...", "revocation": None}
-        }
-        cred_def = {
-            "ver": "1.0",
-            "id": cred_def_id,
-            "schemaId": "999",
-            "type": "CL",
-            "tag": "default",
-            "value": cred_def_value,
-        }
-        cred_def_json = json.dumps(cred_def)
-
-        mock_fetch_cred_def.return_value = {"mock": "cred-def"}
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.make_credential_definition_id.return_value = cred_def_id
-        issuer.create_and_store_credential_definition.return_value = (
-            cred_def_id,
-            cred_def_json,
-        )
-        issuer.credential_definition_in_wallet.return_value = True
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        schema_id = "schema_issuer_did:name:1.0"
-        tag = "default"
-        future = asyncio.Future()
-        future.set_result(mock.MagicMock(add_record=mock.CoroutineMock()))
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did, mock.patch.object(
-            ledger, "get_indy_storage", mock.MagicMock()
-        ) as mock_get_storage:
-            mock_get_storage.return_value = future
-            mock_wallet_get_public_did.return_value = DIDInfo(
-                did=self.test_did,
-                verkey=self.test_verkey,
-                metadata=None,
-                method=SOV,
-                key_type=ED25519,
-            )
-
-            async with ledger:
-                mock_did = mock_wallet_get_public_did.return_value
-
-                (
-                    result_id,
-                    result_def,
-                    novel,
-                ) = await ledger.create_and_send_credential_definition(
-                    issuer, schema_id, None, tag
-                )
-                assert result_id == cred_def_id
-                assert not novel
-
-                mock_wallet_get_public_did.assert_called_once_with()
-                mock_get_schema.assert_called_once_with(schema_id)
-
-                mock_build_cred_def.assert_not_called()
-                mock_get_storage.assert_not_called()
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.get_schema")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    async def test_send_credential_definition_no_such_schema(
-        self,
-        mock_close,
-        mock_open,
-        mock_get_schema,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_get_schema.return_value = {}
-
-        issuer = mock.MagicMock(IndyIssuer)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        schema_id = "schema_issuer_did:name:1.0"
-        tag = "default"
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            async with ledger:
-                with self.assertRaises(LedgerError):
-                    await ledger.create_and_send_credential_definition(
-                        issuer, schema_id, None, tag
-                    )
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.get_schema")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch(
-        "aries_cloudagent.ledger.indy.IndySdkLedger.fetch_credential_definition"
-    )
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.find_all_records")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.add_record")
-    @mock.patch("indy.ledger.build_cred_def_request")
-    async def test_send_credential_definition_offer_exception(
-        self,
-        mock_build_cred_def,
-        mock_add_record,
-        mock_find_all_records,
-        mock_submit,
-        mock_fetch_cred_def,
-        mock_close,
-        mock_open,
-        mock_get_schema,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_find_all_records.return_value = []
-
-        mock_get_schema.return_value = {"seqNo": 999}
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.credential_definition_in_wallet.side_effect = IndyIssuerError(
-            "common IO error"
-        )
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        schema_id = "schema_issuer_did:name:1.0"
-        tag = "default"
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            async with ledger:
-                with self.assertRaises(LedgerError):
-                    await ledger.create_and_send_credential_definition(
-                        issuer, schema_id, None, tag
-                    )
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.get_schema")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch(
-        "aries_cloudagent.ledger.indy.IndySdkLedger.fetch_credential_definition"
-    )
-    async def test_send_credential_definition_cred_def_in_wallet_not_ledger(
-        self,
-        mock_fetch_cred_def,
-        mock_close,
-        mock_open,
-        mock_get_schema,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_get_schema.return_value = {"seqNo": 999}
-        cred_def_id = f"{self.test_did}:3:CL:999:default"
-        cred_def_value = {
-            "primary": {"n": "...", "s": "...", "r": "...", "revocation": None}
-        }
-        cred_def = {
-            "ver": "1.0",
-            "id": cred_def_id,
-            "schemaId": "999",
-            "type": "CL",
-            "tag": "default",
-            "value": cred_def_value,
-        }
-        cred_def_json = json.dumps(cred_def)
-
-        mock_fetch_cred_def.return_value = {}
-
-        issuer = mock.MagicMock(IndyIssuer)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        schema_id = "schema_issuer_did:name:1.0"
-        tag = "default"
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            async with ledger:
-                with self.assertRaises(LedgerError):
-                    await ledger.create_and_send_credential_definition(
-                        issuer, schema_id, None, tag
-                    )
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.get_schema")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch(
-        "aries_cloudagent.ledger.indy.IndySdkLedger.fetch_credential_definition"
-    )
-    async def test_send_credential_definition_cred_def_not_on_ledger_wallet_check_x(
-        self,
-        mock_fetch_cred_def,
-        mock_close,
-        mock_open,
-        mock_get_schema,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_get_schema.return_value = {"seqNo": 999}
-        cred_def_id = f"{self.test_did}:3:CL:999:default"
-        cred_def_value = {
-            "primary": {"n": "...", "s": "...", "r": "...", "revocation": None}
-        }
-        cred_def = {
-            "ver": "1.0",
-            "id": cred_def_id,
-            "schemaId": "999",
-            "type": "CL",
-            "tag": "default",
-            "value": cred_def_value,
-        }
-        cred_def_json = json.dumps(cred_def)
-
-        mock_fetch_cred_def.return_value = {}
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.credential_definition_in_wallet = mock.CoroutineMock(
-            side_effect=IndyIssuerError("dummy error")
-        )
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        schema_id = "schema_issuer_did:name:1.0"
-        tag = "default"
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            async with ledger:
-                with self.assertRaises(LedgerError) as context:
-                    await ledger.create_and_send_credential_definition(
-                        issuer, schema_id, None, tag
-                    )
-                assert "dummy error" in str(context.exception)
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.get_schema")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch(
-        "aries_cloudagent.ledger.indy.IndySdkLedger.fetch_credential_definition"
-    )
-    async def test_send_credential_definition_cred_def_not_on_ledger_nor_wallet_send_x(
-        self,
-        mock_fetch_cred_def,
-        mock_close,
-        mock_open,
-        mock_get_schema,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_get_schema.return_value = {"seqNo": 999}
-        cred_def_id = f"{self.test_did}:3:CL:999:default"
-        cred_def_value = {
-            "primary": {"n": "...", "s": "...", "r": "...", "revocation": None}
-        }
-        cred_def = {
-            "ver": "1.0",
-            "id": cred_def_id,
-            "schemaId": "999",
-            "type": "CL",
-            "tag": "default",
-            "value": cred_def_value,
-        }
-        cred_def_json = json.dumps(cred_def)
-
-        mock_fetch_cred_def.return_value = {}
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.credential_definition_in_wallet = mock.CoroutineMock(return_value=False)
-        issuer.create_and_store_credential_definition = mock.CoroutineMock(
-            side_effect=IndyIssuerError("dummy error")
-        )
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        schema_id = "schema_issuer_did:name:1.0"
-        tag = "default"
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            async with ledger:
-                with self.assertRaises(LedgerError) as context:
-                    await ledger.create_and_send_credential_definition(
-                        issuer, schema_id, None, tag
-                    )
-                assert "dummy error" in str(context.exception)
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.get_schema")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch(
-        "aries_cloudagent.ledger.indy.IndySdkLedger.fetch_credential_definition"
-    )
-    async def test_send_credential_definition_read_only(
-        self,
-        mock_fetch_cred_def,
-        mock_close,
-        mock_open,
-        mock_get_schema,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_get_schema.return_value = {"seqNo": 999}
-        cred_def_id = f"{self.test_did}:3:CL:999:default"
-        cred_def_value = {
-            "primary": {"n": "...", "s": "...", "r": "...", "revocation": None}
-        }
-        cred_def = {
-            "ver": "1.0",
-            "id": cred_def_id,
-            "schemaId": "999",
-            "type": "CL",
-            "tag": "default",
-            "value": cred_def_value,
-        }
-        cred_def_json = json.dumps(cred_def)
-
-        mock_fetch_cred_def.return_value = {}
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.credential_definition_in_wallet = mock.CoroutineMock(return_value=False)
-        issuer.create_and_store_credential_definition = mock.CoroutineMock(
-            return_value=("cred-def-id", "cred-def-json")
-        )
-        ledger = IndySdkLedger(
-            IndySdkLedgerPool("name", checked=True, read_only=True), self.profile
-        )
-        schema_id = "schema_issuer_did:name:1.0"
-        tag = "default"
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            async with ledger:
-                with self.assertRaises(LedgerError) as context:
-                    await ledger.create_and_send_credential_definition(
-                        issuer, schema_id, None, tag
-                    )
-                assert "read only" in str(context.exception)
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.get_schema")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch(
-        "aries_cloudagent.ledger.indy.IndySdkLedger.fetch_credential_definition"
-    )
-    async def test_send_credential_definition_cred_def_on_ledger_not_in_wallet(
-        self,
-        mock_fetch_cred_def,
-        mock_close,
-        mock_open,
-        mock_get_schema,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_get_schema.return_value = {"seqNo": 999}
-        cred_def_id = f"{self.test_did}:3:CL:999:default"
-        cred_def_value = {
-            "primary": {"n": "...", "s": "...", "r": "...", "revocation": None}
-        }
-        cred_def = {
-            "ver": "1.0",
-            "id": cred_def_id,
-            "schemaId": "999",
-            "type": "CL",
-            "tag": "default",
-            "value": cred_def_value,
-        }
-        cred_def_json = json.dumps(cred_def)
-
-        mock_fetch_cred_def.return_value = cred_def
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.credential_definition_in_wallet = mock.CoroutineMock(return_value=False)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        schema_id = "schema_issuer_did:name:1.0"
-        tag = "default"
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            async with ledger:
-                with self.assertRaises(LedgerError):
-                    await ledger.create_and_send_credential_definition(
-                        issuer, schema_id, None, tag
-                    )
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.get_schema")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch(
-        "aries_cloudagent.ledger.indy.IndySdkLedger.fetch_credential_definition"
-    )
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.find_all_records")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.add_record")
-    @mock.patch("indy.ledger.build_cred_def_request")
-    async def test_send_credential_definition_on_ledger_in_wallet(
-        self,
-        mock_build_cred_def,
-        mock_add_record,
-        mock_find_all_records,
-        mock_submit,
-        mock_fetch_cred_def,
-        mock_close,
-        mock_open,
-        mock_get_schema,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_find_all_records.return_value = []
-
-        mock_get_schema.return_value = {"seqNo": 999}
-        cred_def_id = f"{self.test_did}:3:CL:999:default"
-        cred_def_value = {
-            "primary": {"n": "...", "s": "...", "r": "...", "revocation": None}
-        }
-        cred_def = {
-            "ver": "1.0",
-            "id": cred_def_id,
-            "schemaId": "999",
-            "type": "CL",
-            "tag": "default",
-            "value": cred_def_value,
-        }
-        cred_def_json = json.dumps(cred_def)
-
-        mock_fetch_cred_def.return_value = cred_def
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.make_credential_definition_id.return_value = cred_def_id
-        issuer.create_and_store_credential_definition.return_value = (
-            cred_def_id,
-            cred_def_json,
-        )
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        schema_id = "schema_issuer_did:name:1.0"
-        tag = "default"
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            async with ledger:
-                mock_wallet_get_public_did.return_value = None
-                with self.assertRaises(BadLedgerRequestError):
-                    await ledger.create_and_send_credential_definition(
-                        issuer, schema_id, None, tag
-                    )
-
-                mock_wallet_get_public_did.return_value = DIDInfo(
-                    did=self.test_did,
-                    verkey=self.test_verkey,
-                    metadata=None,
-                    method=SOV,
-                    key_type=ED25519,
-                )
-                mock_did = mock_wallet_get_public_did.return_value
-
-                (
-                    result_id,
-                    result_def,
-                    novel,
-                ) = await ledger.create_and_send_credential_definition(
-                    issuer, schema_id, None, tag
-                )
-                assert result_id == cred_def_id
-
-                mock_get_schema.assert_called_once_with(schema_id)
-
-                mock_build_cred_def.assert_not_called()
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.get_schema")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch(
-        "aries_cloudagent.ledger.indy.IndySdkLedger.fetch_credential_definition"
-    )
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.find_all_records")
-    @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.add_record")
-    @mock.patch("indy.ledger.build_cred_def_request")
-    async def test_send_credential_definition_create_cred_def_exception(
-        self,
-        mock_build_cred_def,
-        mock_add_record,
-        mock_find_all_records,
-        mock_submit,
-        mock_fetch_cred_def,
-        mock_close,
-        mock_open,
-        mock_get_schema,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_find_all_records.return_value = []
-
-        mock_get_schema.return_value = {"seqNo": 999}
-        cred_def_id = f"{self.test_did}:3:CL:999:default"
-        cred_def_value = {
-            "primary": {"n": "...", "s": "...", "r": "...", "revocation": None}
-        }
-        cred_def = {
-            "ver": "1.0",
-            "id": cred_def_id,
-            "schemaId": "999",
-            "type": "CL",
-            "tag": "default",
-            "value": cred_def_value,
-        }
-        cred_def_json = json.dumps(cred_def)
-
-        mock_fetch_cred_def.return_value = None
-
-        issuer = mock.MagicMock(IndyIssuer)
-        issuer.create_and_store_credential_definition.side_effect = IndyIssuerError(
-            "invalid structure"
-        )
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        schema_id = "schema_issuer_did:name:1.0"
-        tag = "default"
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = DIDInfo(
-                did=self.test_did,
-                verkey=self.test_verkey,
-                metadata=None,
-                method=SOV,
-                key_type=ED25519,
-            )
-            async with ledger:
-                with self.assertRaises(LedgerError):
-                    await ledger.create_and_send_credential_definition(
-                        issuer, schema_id, None, tag
-                    )
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("indy.ledger.build_get_cred_def_request")
-    @mock.patch("indy.ledger.parse_get_cred_def_response")
-    async def test_get_credential_definition(
-        self,
-        mock_parse_get_cred_def_resp,
-        mock_build_get_cred_def_req,
-        mock_submit,
-        mock_close,
-        mock_open,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_parse_get_cred_def_resp.return_value = (
-            None,
-            json.dumps({"result": {"seqNo": 1}}),
-        )
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = mock.CoroutineMock()
-            mock_did = mock_wallet_get_public_did.return_value
-            ledger = IndySdkLedger(
-                IndySdkLedgerPool("name", checked=True, cache=InMemoryCache()),
-                self.profile,
-            )
-
-            async with ledger:
-                response = await ledger.get_credential_definition("cred_def_id")
-                mock_wallet_get_public_did.assert_called_once_with()
-                mock_build_get_cred_def_req.assert_called_once_with(
-                    mock_did.did, "cred_def_id"
-                )
-                mock_submit.assert_called_once_with(
-                    mock_build_get_cred_def_req.return_value, sign_did=mock_did
-                )
-                mock_parse_get_cred_def_resp.assert_called_once_with(
-                    mock_submit.return_value
-                )
-                assert response == json.loads(
-                    mock_parse_get_cred_def_resp.return_value[1]
-                )
-                response == await ledger.get_credential_definition(  # cover get-from-cache
-                    "cred_def_id"
-                )
-                assert response == json.loads(
-                    mock_parse_get_cred_def_resp.return_value[1]
-                )
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("indy.ledger.build_get_cred_def_request")
-    @mock.patch("indy.ledger.parse_get_cred_def_response")
-    async def test_get_credential_definition_ledger_not_found(
-        self,
-        mock_parse_get_cred_def_resp,
-        mock_build_get_cred_def_req,
-        mock_submit,
-        mock_close,
-        mock_open,
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-
-        mock_parse_get_cred_def_resp.side_effect = IndyError(
-            error_code=ErrorCode.LedgerNotFound, error_details={"message": "not today"}
-        )
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            ledger = IndySdkLedger(
-                IndySdkLedgerPool("name", checked=True), self.profile
-            )
-            async with ledger:
-                response = await ledger.get_credential_definition("cred_def_id")
-                mock_did = mock_wallet_get_public_did.return_value
-                mock_wallet_get_public_did.assert_called_once_with()
-                mock_build_get_cred_def_req.assert_called_once_with(
-                    mock_did.did, "cred_def_id"
-                )
-                mock_submit.assert_called_once_with(
-                    mock_build_get_cred_def_req.return_value, sign_did=mock_did
-                )
-                mock_parse_get_cred_def_resp.assert_called_once_with(
-                    mock_submit.return_value
-                )
-
-                assert response is None
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    @mock.patch("indy.ledger.build_get_cred_def_request")
-    @mock.patch("indy.ledger.parse_get_cred_def_response")
-    async def test_fetch_credential_definition_ledger_x(
-        self,
-        mock_parse_get_cred_def_resp,
-        mock_build_get_cred_def_req,
-        mock_submit,
-        mock_close,
-        mock_open,
-    ):
-        mock_wallet = mock.MagicMock()
-
-        mock_parse_get_cred_def_resp.side_effect = IndyError(
-            error_code=ErrorCode.CommonInvalidParam1,
-            error_details={"message": "not today"},
-        )
-
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            async with ledger:
-                with self.assertRaises(LedgerError) as context:
-                    await ledger.fetch_credential_definition("cred_def_id")
-                assert "not today" in str(context.exception)
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("indy.ledger.build_get_nym_request")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    async def test_get_key_for_did(
-        self, mock_submit, mock_build_get_nym_req, mock_close, mock_open
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_submit.return_value = json.dumps(
-            {"result": {"data": json.dumps({"verkey": self.test_verkey})}}
-        )
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            async with ledger:
-                response = await ledger.get_key_for_did(self.test_did)
-
-                mock_build_get_nym_req.assert_called_once_with(
-                    self.test_did,
-                    ledger.did_to_nym(self.test_did),
-                )
-                mock_submit.assert_called_once_with(
-                    mock_build_get_nym_req.return_value,
-                    sign_did=mock_wallet_get_public_did.return_value,
-                )
-                assert response == self.test_verkey
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("indy.ledger.build_get_attrib_request")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    async def test_get_endpoint_for_did(
-        self, mock_submit, mock_build_get_attrib_req, mock_close, mock_open
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        endpoint = "http://aries.ca"
-        mock_submit.return_value = json.dumps(
-            {"result": {"data": json.dumps({"endpoint": {"endpoint": endpoint}})}}
-        )
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            async with ledger:
-                response = await ledger.get_endpoint_for_did(self.test_did)
-
-                mock_build_get_attrib_req.assert_called_once_with(
-                    self.test_did,
-                    ledger.did_to_nym(self.test_did),
-                    "endpoint",
-                    None,
-                    None,
-                )
-                mock_submit.assert_called_once_with(
-                    mock_build_get_attrib_req.return_value,
-                    sign_did=mock_wallet_get_public_did.return_value,
-                )
-                assert response == endpoint
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("indy.ledger.build_get_attrib_request")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    async def test_get_endpoint_of_type_profile_for_did(
-        self, mock_submit, mock_build_get_attrib_req, mock_close, mock_open
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        endpoint = "http://company.com/masterdata"
-        endpoint_type = EndpointType.PROFILE
-        mock_submit.return_value = json.dumps(
-            {
-                "result": {
-                    "data": json.dumps(
-                        {"endpoint": {EndpointType.PROFILE.indy: endpoint}}
-                    )
-                }
-            }
-        )
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            async with ledger:
-                response = await ledger.get_endpoint_for_did(
-                    self.test_did,
-                    endpoint_type,
-                )
-
-                mock_build_get_attrib_req.assert_called_once_with(
-                    self.test_did,
-                    ledger.did_to_nym(self.test_did),
-                    "endpoint",
-                    None,
-                    None,
-                )
-                mock_submit.assert_called_once_with(
-                    mock_build_get_attrib_req.return_value,
-                    sign_did=mock_wallet_get_public_did.return_value,
-                )
-                assert response == endpoint
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("indy.ledger.build_get_attrib_request")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    async def test_get_all_endpoints_for_did(
-        self, mock_submit, mock_build_get_attrib_req, mock_close, mock_open
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        profile_endpoint = "http://company.com/masterdata"
-        default_endpoint = "http://agent.company.com"
-        data_json = json.dumps(
-            {"endpoint": {"endpoint": default_endpoint, "profile": profile_endpoint}}
-        )
-        mock_submit.return_value = json.dumps({"result": {"data": data_json}})
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            async with ledger:
-                response = await ledger.get_all_endpoints_for_did(self.test_did)
-
-                mock_build_get_attrib_req.assert_called_once_with(
-                    self.test_did,
-                    ledger.did_to_nym(self.test_did),
-                    "endpoint",
-                    None,
-                    None,
-                )
-                mock_submit.assert_called_once_with(
-                    mock_build_get_attrib_req.return_value,
-                    sign_did=mock_wallet_get_public_did.return_value,
-                )
-                assert response == json.loads(data_json).get("endpoint")
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("indy.ledger.build_get_attrib_request")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    async def test_get_all_endpoints_for_did_none(
-        self, mock_submit, mock_build_get_attrib_req, mock_close, mock_open
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        profile_endpoint = "http://company.com/masterdata"
-        default_endpoint = "http://agent.company.com"
-        mock_submit.return_value = json.dumps({"result": {"data": None}})
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            async with ledger:
-                response = await ledger.get_all_endpoints_for_did(self.test_did)
-
-                mock_build_get_attrib_req.assert_called_once_with(
-                    self.test_did,
-                    ledger.did_to_nym(self.test_did),
-                    "endpoint",
-                    None,
-                    None,
-                )
-                mock_submit.assert_called_once_with(
-                    mock_build_get_attrib_req.return_value,
-                    sign_did=mock_wallet_get_public_did.return_value,
-                )
-                assert response is None
-
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close")
-    @mock.patch("indy.ledger.build_get_attrib_request")
-    @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit")
-    async def test_get_endpoint_for_did_address_none(
-        self, mock_submit, mock_build_get_attrib_req, mock_close, mock_open
-    ):
-        mock_wallet = mock.MagicMock()
-        self.session.context.injector.bind_provider(BaseWallet, mock_wallet)
-        mock_submit.return_value = json.dumps(
-            {"result": {"data": json.dumps({"endpoint": None})}}
-        )
-        ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile)
-        with mock.patch.object(
-            IndySdkWallet, "get_public_did"
-        ) as mock_wallet_get_public_did:
-            mock_wallet_get_public_did.return_value = self.test_did_info
-            async with ledger:
-                response =
await ledger.get_endpoint_for_did(self.test_did) - - mock_build_get_attrib_req.assert_called_once_with( - self.test_did, - ledger.did_to_nym(self.test_did), - "endpoint", - None, - None, - ) - mock_submit.assert_called_once_with( - mock_build_get_attrib_req.return_value, - sign_did=mock_wallet_get_public_did.return_value, - ) - assert response is None - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_attrib_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_get_endpoint_for_did_no_endpoint( - self, mock_submit, mock_build_get_attrib_req, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_submit.return_value = json.dumps({"result": {"data": None}}) - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - response = await ledger.get_endpoint_for_did(self.test_did) - - mock_build_get_attrib_req.assert_called_once_with( - self.test_did, - ledger.did_to_nym(self.test_did), - "endpoint", - None, - None, - ) - mock_submit.assert_called_once_with( - mock_build_get_attrib_req.return_value, - sign_did=mock_wallet_get_public_did.return_value, - ) - assert response is None - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_attrib_request") - @mock.patch("indy.ledger.build_attrib_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only") - async def test_update_endpoint_for_did( - self, - mock_is_ledger_read_only, - mock_submit, - mock_build_attrib_req, - mock_build_get_attrib_req, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - endpoint = ["http://old.aries.ca", "http://new.aries.ca"] - mock_is_ledger_read_only.return_value = False - mock_submit.side_effect = [ - json.dumps( - { - "result": { - "data": json.dumps({"endpoint": {"endpoint": endpoint[i]}}) - } - } - ) - for i in range(len(endpoint)) - ] - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - response = await ledger.update_endpoint_for_did( - self.test_did, endpoint[1] - ) - - mock_build_get_attrib_req.assert_called_once_with( - self.test_did, - ledger.did_to_nym(self.test_did), - "endpoint", - None, - None, - ) - mock_submit.assert_has_calls( - [ - mock.call( - mock_build_get_attrib_req.return_value, - sign_did=mock_wallet_get_public_did.return_value, - ), - mock.call(mock_build_attrib_req.return_value, True, True), - ] - ) - assert response - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @pytest.mark.asyncio - async def test_construct_attr_json_with_routing_keys(self, mock_close, mock_open): - ledger = 
IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - async with ledger: - attr_json = await ledger._construct_attr_json( - "https://url", - EndpointType.ENDPOINT, - routing_keys=["3YJCx3TqotDWFGv7JMR5erEvrmgu5y4FDqjR7sKWxgXn"], - ) - assert attr_json == json.dumps( - { - "endpoint": { - "endpoint": "https://url", - "routingKeys": ["3YJCx3TqotDWFGv7JMR5erEvrmgu5y4FDqjR7sKWxgXn"], - } - } - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @pytest.mark.asyncio - async def test_construct_attr_json_with_routing_keys_all_exist_endpoints( - self, mock_close, mock_open - ): - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - async with ledger: - attr_json = await ledger._construct_attr_json( - "https://url", - EndpointType.ENDPOINT, - all_exist_endpoints={"profile": "https://endpoint/profile"}, - routing_keys=["3YJCx3TqotDWFGv7JMR5erEvrmgu5y4FDqjR7sKWxgXn"], - ) - assert attr_json == json.dumps( - { - "endpoint": { - "profile": "https://endpoint/profile", - "endpoint": "https://url", - "routingKeys": ["3YJCx3TqotDWFGv7JMR5erEvrmgu5y4FDqjR7sKWxgXn"], - } - } - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_attrib_request") - @mock.patch("indy.ledger.build_attrib_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only") - @pytest.mark.asyncio - async def test_update_endpoint_for_did_calls_attr_json( - self, - mock_is_ledger_read_only, - mock_submit, - mock_build_attrib_req, - mock_build_get_attrib_req, - mock_close, - mock_open, - ): - routing_keys = ["3YJCx3TqotDWFGv7JMR5erEvrmgu5y4FDqjR7sKWxgXn"] - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - mock_is_ledger_read_only.return_value = False - async with ledger: - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did, mock.patch.object( - ledger, - "_construct_attr_json", - mock.CoroutineMock( - return_value=json.dumps( - { - "endpoint": { - "endpoint": { - "endpoint": "https://url", - "routingKeys": [], - } - } - } - ) - ), - ) as mock_construct_attr_json, mock.patch.object( - ledger, - "get_all_endpoints_for_did", - mock.CoroutineMock(return_value={}), - ), mock.patch.object( - ledger, "did_to_nym" - ): - mock_wallet_get_public_did.return_value = self.test_did_info - await ledger.update_endpoint_for_did( - mock_wallet_get_public_did, - "https://url", - EndpointType.ENDPOINT, - routing_keys=routing_keys, - ) - mock_construct_attr_json.assert_called_once_with( - "https://url", EndpointType.ENDPOINT, {}, routing_keys - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_attrib_request") - @mock.patch("indy.ledger.build_attrib_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only") - async def test_update_endpoint_for_did_no_prior_endpoints( - self, - mock_is_ledger_read_only, - mock_submit, - mock_build_attrib_req, - 
mock_build_get_attrib_req, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - endpoint = "http://new.aries.ca" - mock_is_ledger_read_only.return_value = False - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - with mock.patch.object( - ledger, "get_all_endpoints_for_did", mock.CoroutineMock() - ) as mock_get_all: - mock_get_all.return_value = None - response = await ledger.update_endpoint_for_did( - self.test_did, endpoint - ) - - mock_build_get_attrib_req.assert_called_once_with( - self.test_did, - ledger.did_to_nym(self.test_did), - "endpoint", - None, - None, - ) - mock_submit.assert_has_calls( - [ - mock.call(mock_build_attrib_req.return_value, True, True), - ] - ) - assert response - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_attrib_request") - @mock.patch("indy.ledger.build_attrib_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only") - async def test_update_endpoint_of_type_profile_for_did( - self, - mock_is_ledger_read_only, - mock_submit, - mock_build_attrib_req, - mock_build_get_attrib_req, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - endpoint = ["http://company.com/oldProfile", "http://company.com/newProfile"] - endpoint_type = EndpointType.PROFILE - mock_is_ledger_read_only.return_value = False - mock_submit.side_effect = [ - json.dumps( - { - "result": { - "data": json.dumps( - {"endpoint": {endpoint_type.indy: endpoint[i]}} - ) - } - } - ) - for i in range(len(endpoint)) - ] - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - # ledger = mock.patch.object( - # ledger, - # "is_ledger_read_only", - # mock.CoroutineMock(return_value=False), - # ) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - response = await ledger.update_endpoint_for_did( - self.test_did, endpoint[1], endpoint_type - ) - - mock_build_get_attrib_req.assert_called_once_with( - self.test_did, - ledger.did_to_nym(self.test_did), - "endpoint", - None, - None, - ) - mock_submit.assert_has_calls( - [ - mock.call( - mock_build_get_attrib_req.return_value, - sign_did=mock_wallet_get_public_did.return_value, - ), - mock.call(mock_build_attrib_req.return_value, True, True), - ] - ) - assert response - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_attrib_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_update_endpoint_for_did_duplicate( - self, mock_submit, mock_build_get_attrib_req, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - endpoint = "http://aries.ca" - mock_submit.return_value = json.dumps( - {"result": {"data": json.dumps({"endpoint": {"endpoint": 
endpoint}})}} - ) - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - response = await ledger.update_endpoint_for_did(self.test_did, endpoint) - - mock_build_get_attrib_req.assert_called_once_with( - self.test_did, - ledger.did_to_nym(self.test_did), - "endpoint", - None, - None, - ) - mock_submit.assert_called_once_with( - mock_build_get_attrib_req.return_value, - sign_did=mock_wallet_get_public_did.return_value, - ) - assert not response - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_attrib_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_update_endpoint_for_did_read_only( - self, mock_submit, mock_build_get_attrib_req, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - endpoint = "http://aries.ca" - mock_submit.return_value = json.dumps( - {"result": {"data": json.dumps({"endpoint": {"endpoint": endpoint}})}} - ) - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, read_only=True), self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - with self.assertRaises(LedgerError) as context: - await ledger.update_endpoint_for_did( - self.test_did, "distinct endpoint" - ) - assert "read only" in str(context.exception) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only") - async def test_register_nym( - self, - mock_is_ledger_read_only, - mock_submit, - mock_build_nym_req, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_is_ledger_read_only.return_value = False - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did, mock.patch.object( - IndySdkWallet, "get_local_did" - ) as mock_wallet_get_local_did, mock.patch.object( - IndySdkWallet, "replace_local_did_metadata" - ) as mock_wallet_replace_local_did_metadata: - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True), self.profile - ) - mock_wallet_get_public_did.return_value = self.test_did_info - mock_wallet_get_local_did.return_value = self.test_did_info - mock_wallet_replace_local_did_metadata.return_value = mock.CoroutineMock() - async with ledger: - await ledger.register_nym( - self.test_did, - self.test_verkey, - "alias", - None, - ) - mock_build_nym_req.assert_called_once_with( - self.test_did, - self.test_did, - self.test_verkey, - "alias", - None, - ) - mock_submit.assert_called_once_with( - mock_build_nym_req.return_value, - True, - True, - sign_did=mock_wallet_get_public_did.return_value, - ) - mock_wallet_replace_local_did_metadata.assert_called_once_with( - self.test_did_info.did, - { - "test": "test", - **DIDPosture.POSTED.metadata, - }, - ) - - 
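# The deleted tests above all stub ledger replies with the same double-encoded
# envelope: the ATTRIB payload arrives as a JSON string nested inside an
# already-JSON reply, so callers decode twice and treat a null "data" field as
# "no transaction found". A minimal runnable sketch of that convention,
# mirroring the mock_submit fixtures (the helper name is illustrative only,
# not part of ACA-Py):

import json

def parse_endpoint_reply(reply_json: str):
    # Outer reply is JSON; the inner "data" field is itself a JSON string,
    # or None when the DID has no ATTRIB transaction on the ledger.
    data = json.loads(reply_json)["result"]["data"]
    if data is None:
        return None
    return json.loads(data).get("endpoint")

# Same shape the tests feed to the mocked IndySdkLedger._submit:
reply = json.dumps(
    {"result": {"data": json.dumps({"endpoint": {"endpoint": "http://aries.ca"}})}}
)
assert parse_endpoint_reply(reply) == {"endpoint": "http://aries.ca"}
assert parse_endpoint_reply(json.dumps({"result": {"data": None}})) is None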
@mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - async def test_register_nym_read_only(self, mock_close, mock_open): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, read_only=True), self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - with self.assertRaises(LedgerError) as context: - await ledger.register_nym( - self.test_did, - self.test_verkey, - "alias", - None, - ) - assert "read only" in str(context.exception) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only") - async def test_register_nym_no_public_did( - self, - mock_is_ledger_read_only, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock( - type="indy", - get_local_did=mock.CoroutineMock(), - replace_local_did_metadata=mock.CoroutineMock(), - ) - mock_is_ledger_read_only.return_value = False - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = None - async with ledger: - with self.assertRaises(WalletNotFoundError): - await ledger.register_nym( - self.test_did, - self.test_verkey, - "alias", - None, - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only") - async def test_register_nym_ledger_x( - self, - mock_is_ledger_read_only, - mock_submit, - mock_build_nym_req, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock() - mock_build_nym_req.side_effect = IndyError( - error_code=ErrorCode.CommonInvalidParam1, - error_details={"message": "not today"}, - ) - mock_is_ledger_read_only.return_value = False - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - with self.assertRaises(LedgerError): - await ledger.register_nym( - self.test_did, - self.test_verkey, - "alias", - None, - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.is_ledger_read_only") - async def test_register_nym_steward_register_others_did( - self, - mock_is_ledger_read_only, - mock_submit, - mock_build_nym_req, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock() - 
self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_is_ledger_read_only.return_value = False - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did, mock.patch.object( - IndySdkWallet, "get_local_did" - ) as mock_wallet_get_local_did, mock.patch.object( - IndySdkWallet, "replace_local_did_metadata" - ) as mock_wallet_replace_local_did_metadata: - mock_wallet_get_public_did.return_value = self.test_did_info - mock_wallet_get_local_did.side_effect = WalletNotFoundError() - mock_wallet_replace_local_did_metadata.return_value = mock.CoroutineMock() - async with ledger: - await ledger.register_nym( - self.test_did, - self.test_verkey, - "alias", - None, - ) - mock_build_nym_req.assert_called_once_with( - self.test_did, - self.test_did, - self.test_verkey, - "alias", - None, - ) - mock_submit.assert_called_once_with( - mock_build_nym_req.return_value, - True, - True, - sign_did=mock_wallet_get_public_did.return_value, - ) - mock_wallet_replace_local_did_metadata.assert_not_called() - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_get_nym_role( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_submit.return_value = json.dumps( - { - "result": { - "dest": "GjZWsBLgZCR18aL468JAT7w9CZRiBnpxUPPgyQxh4voa", - "txnTime": 1597858571, - "reqId": 1597858571783588400, - "state_proof": { - "root_hash": "7K26MUQt8E2X1vsRJUmc2298VtY8YC5BSDfT5CRJeUDi", - "proof_nodes": "+QHo...", - "multi_signature": { - "participants": ["Node4", "Node3", "Node2"], - "value": { - "state_root_hash": "7K2...", - "pool_state_root_hash": "GT8...", - "ledger_id": 1, - "txn_root_hash": "Hnr...", - "timestamp": 1597858571, - }, - "signature": "QuX...", - }, - }, - "data": json.dumps( - { - "dest": "GjZWsBLgZCR18aL468JAT7w9CZRiBnpxUPPgyQxh4voa", - "identifier": "V4SGRU86Z58d6TV7PBUe6f", - "role": 101, - "seqNo": 11, - "txnTime": 1597858571, - "verkey": "GjZWsBLgZCR18aL468JAT7w9CZRiBnpxUPPgyQxh4voa", - } - ), - "seqNo": 11, - "identifier": "GjZWsBLgZCR18aL468JAT7w9CZRiBnpxUPPgyQxh4voa", - "type": "105", - }, - "op": "REPLY", - } - ) - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - assert await ledger.get_nym_role(self.test_did) == Role.ENDORSER - mock_build_get_nym_req.assert_called_once_with( - self.test_did, - self.test_did, - ) - mock_submit.assert_called_once_with(mock_build_get_nym_req.return_value) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - async def test_get_nym_role_indy_x( - self, mock_build_get_nym_req, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_build_get_nym_req.side_effect = IndyError( - error_code=ErrorCode.CommonInvalidParam1, - 
error_details={"message": "not today"}, - ) - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - with self.assertRaises(LedgerError) as context: - await ledger.get_nym_role(self.test_did) - assert "not today" in context.exception.message - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_get_nym_role_did_not_public_x( - self, mock_submit, mock_build_get_nym_req, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_submit.return_value = json.dumps( - { - "result": { - "dest": "GjZWsBLgZCR18aL468JAT7w9CZRiBnpxUPPgyQxh4voa", - "txnTime": 1597858571, - "reqId": 1597858571783588400, - "state_proof": { - "root_hash": "7K26MUQt8E2X1vsRJUmc2298VtY8YC5BSDfT5CRJeUDi", - "proof_nodes": "+QHo...", - "multi_signature": { - "participants": ["Node4", "Node3", "Node2"], - "value": { - "state_root_hash": "7K2...", - "pool_state_root_hash": "GT8...", - "ledger_id": 1, - "txn_root_hash": "Hnr...", - "timestamp": 1597858571, - }, - "signature": "QuX...", - }, - }, - "data": json.dumps(None), - "seqNo": 11, - "identifier": "GjZWsBLgZCR18aL468JAT7w9CZRiBnpxUPPgyQxh4voa", - "type": "105", - }, - "op": "REPLY", - } - ) - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - with self.assertRaises(BadLedgerRequestError): - await ledger.get_nym_role(self.test_did) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("indy.ledger.build_get_txn_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.register_nym") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_rotate_public_did_keypair( - self, - mock_submit, - mock_register_nym, - mock_build_get_txn_request, - mock_build_get_nym_request, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_submit.side_effect = [ - json.dumps({"result": {"data": json.dumps({"seqNo": 1234})}}), - json.dumps( - { - "result": { - "data": {"txn": {"data": {"role": "101", "alias": "Billy"}}} - } - } - ), - ] - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did, mock.patch.object( - IndySdkWallet, "rotate_did_keypair_start", autospec=True - ) as mock_wallet_rotate_did_keypair_start, mock.patch.object( - IndySdkWallet, "rotate_did_keypair_apply", autospec=True - ) as mock_wallet_rotate_did_keypair_apply: - mock_wallet_get_public_did.return_value = self.test_did_info - mock_wallet_rotate_did_keypair_start.return_value = self.test_verkey - mock_wallet_rotate_did_keypair_apply.return_value = None - async with ledger: - await 
ledger.rotate_public_did_keypair() - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_rotate_public_did_keypair_no_nym( - self, mock_submit, mock_build_get_nym_request, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_submit.return_value = json.dumps({"result": {"data": json.dumps(None)}}) - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did, mock.patch.object( - IndySdkWallet, "rotate_did_keypair_start", autospec=True - ) as mock_wallet_rotate_did_keypair_start, mock.patch.object( - IndySdkWallet, "rotate_did_keypair_apply", autospec=True - ) as mock_wallet_rotate_did_keypair_apply: - mock_wallet_get_public_did.return_value = self.test_did_info - mock_wallet_rotate_did_keypair_start.return_value = self.test_verkey - mock_wallet_rotate_did_keypair_apply.return_value = None - async with ledger: - with self.assertRaises(BadLedgerRequestError): - await ledger.rotate_public_did_keypair() - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_nym_request") - @mock.patch("indy.ledger.build_get_txn_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.register_nym") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_rotate_public_did_keypair_corrupt_nym_txn( - self, - mock_submit, - mock_register_nym, - mock_build_get_txn_request, - mock_build_get_nym_request, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_submit.side_effect = [ - json.dumps({"result": {"data": json.dumps({"seqNo": 1234})}}), - json.dumps({"result": {"data": None}}), - ] - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did, mock.patch.object( - IndySdkWallet, "rotate_did_keypair_start", autospec=True - ) as mock_wallet_rotate_did_keypair_start, mock.patch.object( - IndySdkWallet, "rotate_did_keypair_apply", autospec=True - ) as mock_wallet_rotate_did_keypair_apply: - mock_wallet_get_public_did.return_value = self.test_did_info - mock_wallet_rotate_did_keypair_start.return_value = self.test_verkey - mock_wallet_rotate_did_keypair_apply.return_value = None - async with ledger: - with self.assertRaises(BadLedgerRequestError): - await ledger.rotate_public_did_keypair() - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("indy.ledger.build_get_revoc_reg_def_request") - @mock.patch("indy.ledger.parse_get_revoc_reg_def_response") - async def test_get_revoc_reg_def( - self, - mock_indy_parse_get_rrdef_resp, - mock_indy_build_get_rrdef_req, - mock_submit, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - 
mock_indy_parse_get_rrdef_resp.return_value = ( - "rr-id", - json.dumps({"...": "..."}), - ) - mock_submit.return_value = json.dumps({"result": {"txnTime": 1234567890}}) - - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, read_only=True), self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - result = await ledger.get_revoc_reg_def("rr-id") - assert result == {"...": "...", "txnTime": 1234567890} - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("indy.ledger.build_get_revoc_reg_def_request") - async def test_get_revoc_reg_def_indy_x( - self, mock_indy_build_get_rrdef_req, mock_submit, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - mock_indy_build_get_rrdef_req.side_effect = IndyError( - error_code=ErrorCode.CommonInvalidParam1, - error_details={"message": "not today"}, - ) - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, read_only=True), self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - with self.assertRaises(IndyError) as context: - await ledger.get_revoc_reg_def("rr-id") - assert "not today" in context.exception.message - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("indy.ledger.build_get_revoc_reg_request") - @mock.patch("indy.ledger.parse_get_revoc_reg_response") - async def test_get_revoc_reg_entry( - self, - mock_indy_parse_get_rr_resp, - mock_indy_build_get_rr_req, - mock_submit, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_indy_parse_get_rr_resp.return_value = ( - "rr-id", - '{"hello": "world"}', - 1234567890, - ) - - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, read_only=True), self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - (result, _) = await ledger.get_revoc_reg_entry("rr-id", 1234567890) - assert result == {"hello": "world"} - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("indy.ledger.build_get_revoc_reg_request") - @mock.patch("indy.ledger.parse_get_revoc_reg_response") - async def test_get_revoc_reg_entry_x( - self, - mock_indy_parse_get_rr_resp, - mock_indy_build_get_rr_req, - mock_submit, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_indy_parse_get_rr_resp.side_effect = IndyError( - error_code=ErrorCode.PoolLedgerTimeout, - error_details={"message": "bye"}, - ) - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, read_only=True), 
self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - with self.assertRaises(LedgerError): - async with ledger: - await ledger.get_revoc_reg_entry("rr-id", 1234567890) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("indy.ledger.build_get_revoc_reg_delta_request") - @mock.patch("indy.ledger.parse_get_revoc_reg_delta_response") - async def test_get_revoc_reg_delta( - self, - mock_indy_parse_get_rrd_resp, - mock_indy_build_get_rrd_req, - mock_submit, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_indy_parse_get_rrd_resp.return_value = ( - "rr-id", - '{"hello": "world"}', - 1234567890, - ) - - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, read_only=True), self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - (result, _) = await ledger.get_revoc_reg_delta("rr-id") - assert result == {"hello": "world"} - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("indy.ledger.build_revoc_reg_def_request") - async def test_send_revoc_reg_def_public_did( - self, mock_indy_build_rrdef_req, mock_submit, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_indy_build_rrdef_req.return_value = '{"hello": "world"}' - - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, read_only=True), self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did, mock.patch.object( - IndySdkWallet, "get_local_did" - ) as mock_wallet_get_local_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - await ledger.send_revoc_reg_def({"rr": "def"}, issuer_did=None) - mock_wallet_get_public_did.assert_called_once() - assert not mock_wallet_get_local_did.called - mock_submit.assert_called_once_with( - mock_indy_build_rrdef_req.return_value, - True, - sign_did=self.test_did_info, - write_ledger=True, - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("indy.ledger.build_revoc_reg_def_request") - async def test_send_revoc_reg_def_local_did( - self, mock_indy_build_rrdef_req, mock_submit, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_indy_build_rrdef_req.return_value = '{"hello": "world"}' - - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, read_only=True), self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_local_did" - ) as mock_wallet_get_local_did: - mock_wallet_get_local_did.return_value = self.test_did_info - async with ledger: - await ledger.send_revoc_reg_def( - {"rr": "def"}, - 
issuer_did=self.test_did, - ) - mock_wallet_get_local_did.assert_called_once_with(self.test_did) - mock_submit.assert_called_once_with( - mock_indy_build_rrdef_req.return_value, - True, - sign_did=self.test_did_info, - write_ledger=True, - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("indy.ledger.build_revoc_reg_def_request") - async def test_send_revoc_reg_def_x_no_did( - self, mock_indy_build_rrdef_req, mock_submit, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_indy_build_rrdef_req.return_value = '{"hello": "world"}' - - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, read_only=True), self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_local_did" - ) as mock_wallet_get_local_did: - mock_wallet_get_local_did.return_value = None - async with ledger: - with self.assertRaises(LedgerTransactionError) as context: - await ledger.send_revoc_reg_def( - {"rr": "def"}, - issuer_did=self.test_did, - ) - assert "No issuer DID found for revocation registry definition" in str( - context.exception - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("indy.ledger.build_revoc_reg_entry_request") - async def test_send_revoc_reg_entry_public_did( - self, mock_indy_build_rre_req, mock_submit, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_indy_build_rre_req.return_value = '{"hello": "world"}' - - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, read_only=True), self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did, mock.patch.object( - IndySdkWallet, "get_local_did" - ) as mock_wallet_get_local_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - await ledger.send_revoc_reg_entry( - "rr-id", "CL_ACCUM", {"rev-reg": "entry"}, issuer_did=None - ) - mock_wallet_get_public_did.assert_called_once() - assert not mock_wallet_get_local_did.called - mock_submit.assert_called_once_with( - mock_indy_build_rre_req.return_value, - True, - sign_did=self.test_did_info, - write_ledger=True, - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("indy.ledger.build_revoc_reg_entry_request") - async def test_send_revoc_reg_entry_local_did( - self, mock_indy_build_rre_req, mock_submit, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_indy_build_rre_req.return_value = '{"hello": "world"}' - - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, read_only=True), self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_local_did" - ) as mock_wallet_get_local_did: - mock_wallet_get_local_did.return_value = self.test_did_info - async with ledger: - result = await ledger.send_revoc_reg_entry( - "rr-id", - "CL_ACCUM", - {"rev-reg": "entry"}, - 
issuer_did=self.test_did, - ) - mock_wallet_get_local_did.assert_called_once_with(self.test_did) - mock_submit.assert_called_once_with( - mock_indy_build_rre_req.return_value, - True, - sign_did=self.test_did_info, - write_ledger=True, - ) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - @mock.patch("indy.ledger.build_revoc_reg_entry_request") - async def test_send_revoc_reg_entry_x_no_did( - self, mock_indy_build_rre_req, mock_submit, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - mock_indy_build_rre_req.return_value = '{"hello": "world"}' - - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, read_only=True), self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_local_did" - ) as mock_wallet_get_local_did: - mock_wallet_get_local_did.return_value = None - async with ledger: - with self.assertRaises(LedgerTransactionError) as context: - await ledger.send_revoc_reg_entry( - "rr-id", - "CL_ACCUM", - {"rev-reg": "entry"}, - issuer_did=self.test_did, - ) - assert "No issuer DID found for revocation registry entry" in str( - context.exception - ) - - @mock.patch("indy.pool.open_pool_ledger") - @mock.patch("indy.pool.close_pool_ledger") - async def test_taa_digest_bad_value( - self, - mock_close_pool, - mock_open_ledger, - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - with self.assertRaises(ValueError): - await ledger.taa_digest(None, None) - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("indy.ledger.build_get_acceptance_mechanisms_request") - @mock.patch("indy.ledger.build_get_txn_author_agreement_request") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger._submit") - async def test_get_txn_author_agreement( - self, - mock_submit, - mock_build_get_taa_req, - mock_build_get_acc_mech_req, - mock_close, - mock_open, - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - txn_result_data = {"text": "text", "version": "1.0"} - mock_submit.side_effect = [ - json.dumps({"result": {"data": txn_result_data}}) for i in range(2) - ] - ledger = IndySdkLedger(IndySdkLedgerPool("name", checked=True), self.profile) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - response = await ledger.get_txn_author_agreement(reload=True) - - mock_build_get_acc_mech_req.assert_called_once_with( - self.test_did, None, None - ) - mock_build_get_taa_req.assert_called_once_with( - self.test_did, - None, - ) - mock_submit.assert_has_calls( - [ - mock.call( - mock_build_get_acc_mech_req.return_value, - sign_did=mock_wallet_get_public_did.return_value, - ), - mock.call( - mock_build_get_taa_req.return_value, - sign_did=mock_wallet_get_public_did.return_value, - ), - ] - ) - assert response == { - "aml_record": 
txn_result_data, - "taa_record": { - **txn_result_data, - "digest": ledger.taa_digest( - txn_result_data["version"], txn_result_data["text"] - ), - }, - "taa_required": True, - } - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.add_record") - @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.find_all_records") - async def test_accept_and_get_latest_txn_author_agreement( - self, mock_find_all_records, mock_add_record, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, cache=InMemoryCache()), self.profile - ) - - accept_time = ledger.taa_rough_timestamp() - taa_record = { - "text": "text", - "version": "1.0", - "digest": "abcd1234", - } - acceptance = { - "text": taa_record["text"], - "version": taa_record["version"], - "digest": taa_record["digest"], - "mechanism": "dummy", - "time": accept_time, - } - - mock_find_all_records.return_value = [ - StorageRecord( - TAA_ACCEPTED_RECORD_TYPE, - json.dumps(acceptance), - {"pool_name": ledger.pool_name}, - ) - ] - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - await ledger.accept_txn_author_agreement( - taa_record=taa_record, mechanism="dummy", accept_time=None - ) - - await ledger.pool.cache.clear( - f"{TAA_ACCEPTED_RECORD_TYPE}::{ledger.pool_name}" - ) - for i in range(2): # populate, then get from, cache - response = await ledger.get_latest_txn_author_acceptance() - assert response == acceptance - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.storage.indy.IndySdkStorage.find_all_records") - async def test_get_latest_txn_author_agreement_none( - self, mock_find_all_records, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, cache=InMemoryCache()), self.profile - ) - - mock_find_all_records.return_value = [] - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - await ledger.pool.cache.clear( - f"{TAA_ACCEPTED_RECORD_TYPE}::{ledger.pool_name}" - ) - response = await ledger.get_latest_txn_author_acceptance() - assert response == {} - - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_open") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedgerPool.context_close") - @mock.patch("aries_cloudagent.ledger.indy.IndySdkLedger.get_schema") - async def test_credential_definition_id2schema_id( - self, mock_get_schema, mock_close, mock_open - ): - mock_wallet = mock.MagicMock() - self.session.context.injector.bind_provider(BaseWallet, mock_wallet) - S_ID = f"{self.test_did}:2:favourite_drink:1.0" - SEQ_NO = "9999" - mock_get_schema.return_value = {"id": S_ID} - - ledger = IndySdkLedger( - IndySdkLedgerPool("name", checked=True, cache=InMemoryCache()), self.profile - ) - with mock.patch.object( - IndySdkWallet, "get_public_did" - ) as mock_wallet_get_public_did: - 
mock_wallet_get_public_did.return_value = self.test_did_info - async with ledger: - s_id_short = await ledger.credential_definition_id2schema_id( - f"{self.test_did}:3:CL:{SEQ_NO}:tag" - ) - - mock_get_schema.assert_called_once_with(SEQ_NO) - - assert s_id_short == S_ID - s_id_long = await ledger.credential_definition_id2schema_id( - f"{self.test_did}:3:CL:{s_id_short}:tag" - ) - assert s_id_long == s_id_short - - def test_error_handler(self): - try: # with self.assertRaises() makes a copy of exception, loses traceback! - with IndyErrorHandler("message", LedgerTransactionError): - try: - 1 / 0 - except ZeroDivisionError as zx: - ix = IndyError(error_code=1, error_details={"message": "bye"}) - ix.__traceback__ = zx.__traceback__ - raise ix - except LedgerTransactionError as err: - assert type(err) == LedgerTransactionError - assert type(err.__cause__) == IndyError - assert err.__traceback__ - assert "bye" in err.message diff --git a/aries_cloudagent/ledger/tests/test_indy_vdr.py b/aries_cloudagent/ledger/tests/test_indy_vdr.py index 9b33cff259..98ecc12ced 100644 --- a/aries_cloudagent/ledger/tests/test_indy_vdr.py +++ b/aries_cloudagent/ledger/tests/test_indy_vdr.py @@ -972,6 +972,56 @@ async def test_get_revoc_reg_delta( 1234567890, ) + @pytest.mark.asyncio + async def test_get_revoc_reg_delta_without_accum_to( + self, + ledger: IndyVdrLedger, + ): + async with ledger: + reg_id = ( + "55GkHamhTU1ZbTbV2ab9DE:4:55GkHamhTU1ZbTbV2ab9DE:3:CL:99:tag:CL_ACCUM:0" + ) + ledger.pool_handle.submit_request.side_effect = [ + # First call to get_revoc_reg_delta + { + "data": { + "value": {}, + "revocRegDefId": reg_id, + }, + }, + # Get registry with test_get_revoc_reg_entry + { + "data": { + "id": reg_id, + "txnTime": 1234567890, + "value": "...", + "revocRegDefId": reg_id, + }, + }, + # Second call to get_revoc_reg_delta + { + "data": { + "value": { + "accum_to": { + "value": {"accum": "ACCUM"}, + "txnTime": 1234567890, + }, + "issued": [1, 2], + "revoked": [3, 4], + }, + "revocRegDefId": reg_id, + }, + }, + ] + result = await ledger.get_revoc_reg_delta(reg_id) + assert result == ( + { + "ver": "1.0", + "value": {"accum": "ACCUM", "issued": [1, 2], "revoked": [3, 4]}, + }, + 1234567890, + ) + @pytest.mark.asyncio async def test_send_revoc_reg_def( self, diff --git a/aries_cloudagent/ledger/tests/test_routes.py b/aries_cloudagent/ledger/tests/test_routes.py index 4347823376..69992e7d2a 100644 --- a/aries_cloudagent/ledger/tests/test_routes.py +++ b/aries_cloudagent/ledger/tests/test_routes.py @@ -1,30 +1,33 @@ from typing import Tuple - from unittest import IsolatedAsyncioTestCase + from aries_cloudagent.tests import mock +from ...connections.models.conn_record import ConnRecord from ...core.in_memory import InMemoryProfile from ...ledger.base import BaseLedger from ...ledger.endpoint_type import EndpointType -from ...ledger.multiple_ledger.ledger_requests_executor import ( - IndyLedgerRequestsExecutor, -) from ...ledger.multiple_ledger.base_manager import ( BaseMultipleLedgerManager, ) +from ...ledger.multiple_ledger.ledger_requests_executor import ( + IndyLedgerRequestsExecutor, +) from ...multitenant.base import BaseMultitenantManager from ...multitenant.manager import MultitenantManager - from .. 
import routes as test_module -from ..indy import Role -from ...connections.models.conn_record import ConnRecord +from ..indy_vdr import Role class TestLedgerRoutes(IsolatedAsyncioTestCase): def setUp(self): self.ledger = mock.create_autospec(BaseLedger) self.ledger.pool_name = "pool.0" - self.profile = InMemoryProfile.test_profile() + self.profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) self.context = self.profile.context setattr(self.context, "profile", self.profile) self.profile.context.injector.bind_instance(BaseLedger, self.ledger) @@ -37,6 +40,7 @@ def setUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) self.test_did = "did" diff --git a/aries_cloudagent/messaging/agent_message.py b/aries_cloudagent/messaging/agent_message.py index 8a311aae5a..e9bcb3031b 100644 --- a/aries_cloudagent/messaging/agent_message.py +++ b/aries_cloudagent/messaging/agent_message.py @@ -2,7 +2,6 @@ from collections import OrderedDict from typing import Mapping, Optional, Text, Union -import uuid from marshmallow import ( EXCLUDE, @@ -13,6 +12,7 @@ pre_dump, pre_load, ) +from uuid_utils import uuid4 from ..protocols.didcomm_prefix import DIDCommPrefix from ..wallet.base import BaseWallet @@ -75,7 +75,7 @@ def __init__( self._message_id = _id self._message_new_id = False else: - self._message_id = str(uuid.uuid4()) + self._message_id = str(uuid4()) self._message_new_id = True self._message_decorators = ( _decorators if _decorators is not None else DecoratorSet() diff --git a/aries_cloudagent/messaging/credential_definitions/routes.py b/aries_cloudagent/messaging/credential_definitions/routes.py index f70bac14e5..6a84a89ca5 100644 --- a/aries_cloudagent/messaging/credential_definitions/routes.py +++ b/aries_cloudagent/messaging/credential_definitions/routes.py @@ -16,6 +16,7 @@ ) from marshmallow import fields +from ...admin.decorators.auth import tenant_authentication from ...admin.request_context import AdminRequestContext from ...connections.models.conn_record import ConnRecord from ...core.event_bus import Event, EventBus @@ -183,6 +184,7 @@ class CredDefConnIdMatchInfoSchema(OpenAPISchema): @querystring_schema(CreateCredDefTxnForEndorserOptionSchema()) @querystring_schema(CredDefConnIdMatchInfoSchema()) @response_schema(TxnOrCredentialDefinitionSendResultSchema(), 200, description="") +@tenant_authentication async def credential_definitions_send_credential_definition(request: web.BaseRequest): """Request handler for sending a credential definition to the ledger. @@ -378,6 +380,7 @@ async def credential_definitions_send_credential_definition(request: web.BaseReq ) @querystring_schema(CredDefQueryStringSchema()) @response_schema(CredentialDefinitionsCreatedResultSchema(), 200, description="") +@tenant_authentication async def credential_definitions_created(request: web.BaseRequest): """Request handler for retrieving credential definitions that current agent created. @@ -412,6 +415,7 @@ async def credential_definitions_created(request: web.BaseRequest): ) @match_info_schema(CredDefIdMatchInfoSchema()) @response_schema(CredentialDefinitionGetResultSchema(), 200, description="") +@tenant_authentication async def credential_definitions_get_credential_definition(request: web.BaseRequest): """Request handler for getting a credential definition from the ledger. 
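# The new @tenant_authentication decorator, together with the
# "admin.admin_api_key" setting and "x-api-key" headers added to the route
# tests, implies admin handlers now validate the configured API key before
# dispatching. A hypothetical, simplified sketch of that check -- names and
# lookup paths are assumptions, not the actual implementation in
# aries_cloudagent/admin/decorators/auth.py:

import functools

from aiohttp import web

def tenant_authentication_sketch(handler):
    """Reject requests whose x-api-key header mismatches the admin API key."""

    @functools.wraps(handler)
    async def wrapper(request: web.BaseRequest):
        # Assumed lookup path: the tests stash the admin context under
        # request["context"] and bind "admin.admin_api_key" on the profile.
        profile = request["context"].profile
        expected = profile.settings.get("admin.admin_api_key")
        if expected and request.headers.get("x-api-key") != expected:
            raise web.HTTPUnauthorized(reason="Invalid or missing x-api-key")
        return await handler(request)

    return wrapper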
@@ -462,6 +466,7 @@ async def credential_definitions_get_credential_definition(request: web.BaseRequ ) @match_info_schema(CredDefIdMatchInfoSchema()) @response_schema(CredentialDefinitionGetResultSchema(), 200, description="") +@tenant_authentication async def credential_definitions_fix_cred_def_wallet_record(request: web.BaseRequest): """Request handler for fixing a credential definition wallet non-secret record. diff --git a/aries_cloudagent/messaging/credential_definitions/tests/test_routes.py b/aries_cloudagent/messaging/credential_definitions/tests/test_routes.py index b88e7bf2fa..90043bdfef 100644 --- a/aries_cloudagent/messaging/credential_definitions/tests/test_routes.py +++ b/aries_cloudagent/messaging/credential_definitions/tests/test_routes.py @@ -23,7 +23,11 @@ class TestCredentialDefinitionRoutes(IsolatedAsyncioTestCase): def setUp(self): self.session_inject = {} - self.profile = InMemoryProfile.test_profile() + self.profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) self.profile_injector = self.profile.context.injector self.ledger = mock.create_autospec(BaseLedger) @@ -61,6 +65,7 @@ def setUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_send_credential_definition(self): @@ -391,7 +396,7 @@ async def test_get_credential_definition_no_ledger(self): async def test_credential_definition_endpoints_wrong_profile_403(self): self.profile = InMemoryProfile.test_profile( - settings={"wallet-type": "askar"}, + settings={"wallet-type": "askar", "admin.admin_api_key": "secret-key"}, profile_class=AskarAnoncredsProfile, ) self.context = AdminRequestContext.test_context({}, self.profile) @@ -404,6 +409,7 @@ async def test_credential_definition_endpoints_wrong_profile_403(self): query={}, __getitem__=lambda _, k: self.request_dict[k], context=self.context, + headers={"x-api-key": "secret-key"}, ) self.request.json = mock.CoroutineMock( return_value={ diff --git a/aries_cloudagent/messaging/decorators/attach_decorator.py b/aries_cloudagent/messaging/decorators/attach_decorator.py index b68a7e3c61..820cf89c90 100644 --- a/aries_cloudagent/messaging/decorators/attach_decorator.py +++ b/aries_cloudagent/messaging/decorators/attach_decorator.py @@ -5,10 +5,10 @@ import copy import json -import uuid from typing import Any, Mapping, Sequence, Tuple, Union from marshmallow import EXCLUDE, fields, pre_load +from uuid_utils import uuid4 from ...did.did_key import DIDKey from ...wallet.base import BaseWallet @@ -620,7 +620,7 @@ def data_base64_string( byte_count: optional attachment byte count """ return AttachDecorator( - ident=ident or str(uuid.uuid4()), + ident=ident or str(uuid4()), description=description, filename=filename, mime_type="text/string", @@ -655,7 +655,7 @@ def data_base64( """ return AttachDecorator( - ident=ident or str(uuid.uuid4()), + ident=ident or str(uuid4()), description=description, filename=filename, mime_type="application/json", @@ -692,7 +692,7 @@ def data_json( """ return AttachDecorator( - ident=ident or str(uuid.uuid4()), + ident=ident or str(uuid4()), description=description, filename=filename, mime_type="application/json", @@ -731,7 +731,7 @@ def data_links( """ return AttachDecorator( - ident=ident or str(uuid.uuid4()), + ident=ident or str(uuid4()), description=description, filename=filename, mime_type=mime_type or "application/json", diff --git a/aries_cloudagent/messaging/decorators/tests/test_attach_decorator.py 
b/aries_cloudagent/messaging/decorators/tests/test_attach_decorator.py index c0f1f26169..02d557bec8 100644 --- a/aries_cloudagent/messaging/decorators/tests/test_attach_decorator.py +++ b/aries_cloudagent/messaging/decorators/tests/test_attach_decorator.py @@ -1,15 +1,16 @@ import json -import uuid from copy import deepcopy from datetime import datetime, timezone from unittest import TestCase import pytest +from uuid_utils import uuid4 -from ....indy.sdk.wallet_setup import IndyWalletConfig +from aries_cloudagent.wallet.base import BaseWallet + +from ....core.in_memory import InMemoryProfile from ....messaging.models.base import BaseModelError -from ....wallet.did_method import SOV -from ....wallet.indy import IndySdkWallet +from ....wallet.did_method import SOV, DIDMethods from ....wallet.key_type import ED25519 from ....wallet.util import b64_to_bytes, bytes_to_b64 from ..attach_decorator import ( @@ -53,7 +54,7 @@ "rev_reg": None, "witness": None, } -IDENT = str(uuid.uuid4()) +IDENT = str(uuid4()) DESCRIPTION = 'To one trained by "Bob," Truth can be found in a potato' FILENAME = "potato.png" MIME_TYPE = "image/png" @@ -78,16 +79,10 @@ def seed(): @pytest.fixture() async def wallet(): - wallet = await IndyWalletConfig( - { - "auto_remove": True, - "key": await IndySdkWallet.generate_wallet_key(), - "key_derivation_method": "RAW", - "name": "test-wallet-sign-verify-attach-deco", - } - ).create_wallet() - yield IndySdkWallet(wallet) - await wallet.close() + profile = InMemoryProfile.test_profile(bind={DIDMethods: DIDMethods()}) + async with profile.session() as session: + wallet = session.inject(BaseWallet) + yield wallet class TestAttachDecorator(TestCase): @@ -422,7 +417,6 @@ def test_data_json_external_mutation(self): assert "key_one" not in data -@pytest.mark.indy class TestAttachDecoratorSignature: @pytest.mark.asyncio async def test_did_raw_key(self, wallet, seed): @@ -459,7 +453,6 @@ async def test_indy_sign(self, wallet, seed): assert not deco_indy.data.jws.signatures assert deco_indy.data.header_map(0) is not None assert deco_indy.data.header_map() is not None - assert "kid" in deco_indy.data.header_map() assert "jwk" in deco_indy.data.header_map() assert "kid" in deco_indy.data.header_map()["jwk"] assert deco_indy.data.header_map()["kid"] == did_key(did_info[0].verkey) @@ -491,7 +484,6 @@ async def test_indy_sign(self, wallet, seed): assert not deco_indy.data.jws.signatures assert deco_indy.data.header_map(0) is not None assert deco_indy.data.header_map() is not None - assert "kid" in deco_indy.data.header_map() assert "jwk" in deco_indy.data.header_map() assert "kid" in deco_indy.data.header_map()["jwk"] assert deco_indy.data.header_map()["kid"] == did_key(did_info[0].verkey) @@ -515,11 +507,8 @@ async def test_indy_sign(self, wallet, seed): assert deco_indy.data.jws.signatures for i in range(len(did_info)): assert deco_indy.data.header_map(i) is not None - assert "kid" in deco_indy.data.header_map(i, jose=False) - assert "kid" in deco_indy.data.header_map(i, jose=True) assert "jwk" in deco_indy.data.header_map(i) assert "kid" in deco_indy.data.header_map(i)["jwk"] - assert deco_indy.data.header_map(i)["kid"] == did_key(did_info[i].verkey) assert deco_indy.data.header_map(i)["jwk"]["kid"] == did_key( did_info[i].verkey ) diff --git a/aries_cloudagent/messaging/jsonld/routes.py b/aries_cloudagent/messaging/jsonld/routes.py index 12c8105571..72cfd62fcb 100644 --- a/aries_cloudagent/messaging/jsonld/routes.py +++ b/aries_cloudagent/messaging/jsonld/routes.py @@ -2,10 +2,10 @@ from 
aiohttp import web from aiohttp_apispec import docs, request_schema, response_schema -from pydid.verification_method import Ed25519VerificationKey2018 - from marshmallow import INCLUDE, Schema, fields +from pydid.verification_method import Ed25519VerificationKey2018 +from ...admin.decorators.auth import tenant_authentication from ...admin.request_context import AdminRequestContext from ...config.base import InjectionError from ...resolver.base import ResolverError @@ -66,6 +66,7 @@ class SignResponseSchema(OpenAPISchema): ) @request_schema(SignRequestSchema()) @response_schema(SignResponseSchema(), 200, description="") +@tenant_authentication async def sign(request: web.BaseRequest): """Request handler for signing a jsonld doc. @@ -130,6 +131,7 @@ class VerifyResponseSchema(OpenAPISchema): ) @request_schema(VerifyRequestSchema()) @response_schema(VerifyResponseSchema(), 200, description="") +@tenant_authentication async def verify(request: web.BaseRequest): """Request handler for verifying a jsonld doc. diff --git a/aries_cloudagent/messaging/jsonld/tests/test_routes.py b/aries_cloudagent/messaging/jsonld/tests/test_routes.py index b36a21b162..f48afb3dcb 100644 --- a/aries_cloudagent/messaging/jsonld/tests/test_routes.py +++ b/aries_cloudagent/messaging/jsonld/tests/test_routes.py @@ -1,15 +1,16 @@ -from copy import deepcopy import json +from copy import deepcopy +from unittest import IsolatedAsyncioTestCase +import pytest from aiohttp import web -from unittest import IsolatedAsyncioTestCase -from aries_cloudagent.tests import mock from pyld import jsonld -import pytest -from .. import routes as test_module +from aries_cloudagent.tests import mock + from ....admin.request_context import AdminRequestContext from ....config.base import InjectionError +from ....core.in_memory import InMemoryProfile from ....resolver.base import DIDMethodNotSupported, DIDNotFound, ResolverError from ....resolver.did_resolver import DIDResolver from ....vc.ld_proofs.document_loader import DocumentLoader @@ -17,6 +18,7 @@ from ....wallet.did_method import SOV, DIDMethods from ....wallet.error import WalletError from ....wallet.key_type import ED25519 +from .. 
import routes as test_module from ..error import ( BadJWSHeaderError, DroppedAttributeError, @@ -84,7 +86,12 @@ def mock_verify_credential(): @pytest.fixture def mock_sign_request(mock_sign_credential): - context = AdminRequestContext.test_context() + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + context = AdminRequestContext.test_context({}, profile) outbound_message_router = mock.CoroutineMock() request_dict = { "context": context, @@ -110,6 +117,7 @@ def mock_sign_request(mock_sign_credential): }, ), __getitem__=lambda _, k: request_dict[k], + headers={"x-api-key": "secret-key"}, ) yield request @@ -137,7 +145,14 @@ def request_body(): @pytest.fixture def mock_verify_request(mock_verify_credential, mock_resolver, request_body): def _mock_verify_request(request_body=request_body): - context = AdminRequestContext.test_context({DIDResolver: mock_resolver}) + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + context = AdminRequestContext.test_context( + {DIDResolver: mock_resolver}, profile + ) outbound_message_router = mock.CoroutineMock() request_dict = { "context": context, @@ -148,6 +163,7 @@ def _mock_verify_request(request_body=request_body): query={}, json=mock.CoroutineMock(return_value=request_body), __getitem__=lambda _, k: request_dict[k], + headers={"x-api-key": "secret-key"}, ) return request @@ -270,7 +286,12 @@ def test_post_process_routes(): class TestJSONLDRoutes(IsolatedAsyncioTestCase): async def asyncSetUp(self): - self.context = AdminRequestContext.test_context() + self.profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context({}, self.profile) self.context.profile.context.injector.bind_instance( DocumentLoader, custom_document_loader ) @@ -287,6 +308,7 @@ async def asyncSetUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_verify_credential(self): diff --git a/aries_cloudagent/messaging/models/base_record.py b/aries_cloudagent/messaging/models/base_record.py index d080696c88..a61ffedb6c 100644 --- a/aries_cloudagent/messaging/models/base_record.py +++ b/aries_cloudagent/messaging/models/base_record.py @@ -3,11 +3,11 @@ import json import logging import sys -import uuid from datetime import datetime from typing import Any, Mapping, Optional, Sequence, Type, TypeVar, Union from marshmallow import fields +from uuid_utils import uuid4 from ...cache.base import BaseCache from ...config.settings import BaseSettings @@ -355,7 +355,7 @@ async def save( new_record = False else: if not self._id: - self._id = str(uuid.uuid4()) + self._id = str(uuid4()) self.created_at = self.updated_at await storage.add_record(self.storage_record) new_record = True diff --git a/aries_cloudagent/messaging/request_context.py b/aries_cloudagent/messaging/request_context.py index c96fb2e334..171085c4bc 100644 --- a/aries_cloudagent/messaging/request_context.py +++ b/aries_cloudagent/messaging/request_context.py @@ -26,13 +26,13 @@ def __init__( self, profile: Profile, *, - context: InjectionContext = None, - settings: Mapping[str, object] = None + context: Optional[InjectionContext] = None, + settings: Optional[Mapping[str, object]] = None ): """Initialize an instance of RequestContext.""" self._connection_ready = False self._connection_record = None - self._context = (context or profile.context).start_scope("request", 
settings) + self._context = (context or profile.context).start_scope(settings) self._message = None self._message_receipt = None self._profile = profile diff --git a/aries_cloudagent/messaging/schemas/routes.py b/aries_cloudagent/messaging/schemas/routes.py index 5b8fa38147..6d4e50f9c2 100644 --- a/aries_cloudagent/messaging/schemas/routes.py +++ b/aries_cloudagent/messaging/schemas/routes.py @@ -15,6 +15,7 @@ from marshmallow import fields from marshmallow.validate import Regexp +from ...admin.decorators.auth import tenant_authentication from ...admin.request_context import AdminRequestContext from ...connections.models.conn_record import ConnRecord from ...core.event_bus import Event, EventBus @@ -166,6 +167,7 @@ class SchemaConnIdMatchInfoSchema(OpenAPISchema): @querystring_schema(CreateSchemaTxnForEndorserOptionSchema()) @querystring_schema(SchemaConnIdMatchInfoSchema()) @response_schema(TxnOrSchemaSendResultSchema(), 200, description="") +@tenant_authentication async def schemas_send_schema(request: web.BaseRequest): """Request handler for creating a schema. @@ -340,6 +342,7 @@ async def schemas_send_schema(request: web.BaseRequest): ) @querystring_schema(SchemaQueryStringSchema()) @response_schema(SchemasCreatedResultSchema(), 200, description="") +@tenant_authentication async def schemas_created(request: web.BaseRequest): """Request handler for retrieving schemas that the current agent created. @@ -369,6 +372,7 @@ async def schemas_created(request: web.BaseRequest): @docs(tags=["schema"], summary="Gets a schema from the ledger") @match_info_schema(SchemaIdMatchInfoSchema()) @response_schema(SchemaGetResultSchema(), 200, description="") +@tenant_authentication async def schemas_get_schema(request: web.BaseRequest): """Request handler for getting a schema from the ledger. @@ -419,6 +423,7 @@ async def schemas_get_schema(request: web.BaseRequest): @docs(tags=["schema"], summary="Writes a schema non-secret record to the wallet") @match_info_schema(SchemaIdMatchInfoSchema()) @response_schema(SchemaGetResultSchema(), 200, description="") +@tenant_authentication async def schemas_fix_schema_wallet_record(request: web.BaseRequest): """Request handler for fixing a schema's wallet non-secrets records. 
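The recurring change across these route modules is the new @tenant_authentication decorator applied to admin request handlers, paired in every test module with an "admin.admin_api_key" profile setting and a matching "x-api-key" request header. As a rough sketch of the behavior being wired in (the real decorator lives in aries_cloudagent/admin/decorators/auth.py and may differ in detail; the insecure-mode setting name below is an assumption):

import functools

from aiohttp import web


def tenant_authentication(handler):
    """Reject requests whose x-api-key header does not match the configured key."""

    @functools.wraps(handler)
    async def wrapper(request: web.BaseRequest):
        profile = request["context"].profile
        expected = profile.settings.get("admin.admin_api_key")
        insecure = profile.settings.get("admin.admin_insecure_mode")  # assumed name
        presented = request.headers.get("x-api-key")
        # Multitenant bearer-token validation would also belong here;
        # that branch is omitted from this sketch.
        if not insecure and expected and presented != expected:
            raise web.HTTPUnauthorized(reason="Invalid or missing API key")
        return await handler(request)

    return wrapper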
diff --git a/aries_cloudagent/messaging/schemas/tests/test_routes.py b/aries_cloudagent/messaging/schemas/tests/test_routes.py index 6e42b4c190..411a951d9e 100644 --- a/aries_cloudagent/messaging/schemas/tests/test_routes.py +++ b/aries_cloudagent/messaging/schemas/tests/test_routes.py @@ -22,7 +22,11 @@ class TestSchemaRoutes(IsolatedAsyncioTestCase): def setUp(self): self.session_inject = {} - self.profile = InMemoryProfile.test_profile() + self.profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) self.profile_injector = self.profile.context.injector self.ledger = mock.create_autospec(BaseLedger) self.ledger.__aenter__ = mock.CoroutineMock(return_value=self.ledger) @@ -54,6 +58,7 @@ def setUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_send_schema(self): @@ -402,7 +407,7 @@ async def test_get_schema_x_ledger(self): async def test_schema_endpoints_wrong_profile_403(self): self.profile = InMemoryProfile.test_profile( - settings={"wallet-type": "askar"}, + settings={"wallet-type": "askar", "admin.admin_api_key": "secret-key"}, profile_class=AskarAnoncredsProfile, ) self.context = AdminRequestContext.test_context({}, self.profile) @@ -415,6 +420,7 @@ async def test_schema_endpoints_wrong_profile_403(self): query={}, __getitem__=lambda _, k: self.request_dict[k], context=self.context, + headers={"x-api-key": "secret-key"}, ) self.request.json = mock.CoroutineMock( diff --git a/aries_cloudagent/multitenant/admin/routes.py b/aries_cloudagent/multitenant/admin/routes.py index 4948a7e518..1e84020d98 100644 --- a/aries_cloudagent/multitenant/admin/routes.py +++ b/aries_cloudagent/multitenant/admin/routes.py @@ -10,6 +10,7 @@ ) from marshmallow import ValidationError, fields, validate, validates_schema +from ...admin.decorators.auth import admin_authentication from ...admin.request_context import AdminRequestContext from ...core.error import BaseError from ...core.profile import ProfileManagerProvider @@ -363,6 +364,7 @@ class WalletListQueryStringSchema(OpenAPISchema): @docs(tags=["multitenancy"], summary="Query subwallets") @querystring_schema(WalletListQueryStringSchema()) @response_schema(WalletListSchema(), 200, description="") +@admin_authentication async def wallets_list(request: web.BaseRequest): """Request handler for listing all internal subwallets. @@ -392,6 +394,7 @@ async def wallets_list(request: web.BaseRequest): @docs(tags=["multitenancy"], summary="Get a single subwallet") @match_info_schema(WalletIdMatchInfoSchema()) @response_schema(WalletRecordSchema(), 200, description="") +@admin_authentication async def wallet_get(request: web.BaseRequest): """Request handler for getting a single subwallet. @@ -422,6 +425,7 @@ async def wallet_get(request: web.BaseRequest): @docs(tags=["multitenancy"], summary="Create a subwallet") @request_schema(CreateWalletRequestSchema) @response_schema(CreateWalletResponseSchema(), 200, description="") +@admin_authentication async def wallet_create(request: web.BaseRequest): """Request handler for adding a new subwallet for handling by the agent. @@ -495,6 +499,7 @@ async def wallet_create(request: web.BaseRequest): @match_info_schema(WalletIdMatchInfoSchema()) @request_schema(UpdateWalletRequestSchema) @response_schema(WalletRecordSchema(), 200, description="") +@admin_authentication async def wallet_update(request: web.BaseRequest): """Request handler for updating an existing subwallet for handling by the agent. 
@@ -559,6 +564,7 @@ async def wallet_update(request: web.BaseRequest): @docs(tags=["multitenancy"], summary="Get auth token for a subwallet") @request_schema(CreateWalletTokenRequestSchema) @response_schema(CreateWalletTokenResponseSchema(), 200, description="") +@admin_authentication async def wallet_create_token(request: web.BaseRequest): """Request handler for creating an authorization token for a specific subwallet. @@ -603,6 +609,7 @@ async def wallet_create_token(request: web.BaseRequest): @match_info_schema(WalletIdMatchInfoSchema()) @request_schema(RemoveWalletRequestSchema) @response_schema(MultitenantModuleResponseSchema(), 200, description="") +@admin_authentication async def wallet_remove(request: web.BaseRequest): """Request handler to remove a subwallet from agent and storage. diff --git a/aries_cloudagent/multitenant/admin/tests/test_routes.py b/aries_cloudagent/multitenant/admin/tests/test_routes.py index 9f4a6d32ba..7576591968 100644 --- a/aries_cloudagent/multitenant/admin/tests/test_routes.py +++ b/aries_cloudagent/multitenant/admin/tests/test_routes.py @@ -24,7 +24,7 @@ async def asyncSetUp(self): return_value=self.mock_multitenant_mgr ) self.profile = InMemoryProfile.test_profile( - settings={"wallet.type": "askar"}, + settings={"wallet.type": "askar", "admin.admin_api_key": "secret-key"}, profile_class=AskarProfile, ) self.context = AdminRequestContext.test_context({}, self.profile) @@ -45,13 +45,18 @@ async def asyncSetUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_format_wallet_record_removes_wallet_key(self): wallet_record = WalletRecord( wallet_id="test", key_management_mode=WalletRecord.MODE_MANAGED, - settings={"wallet.name": "wallet_name", "wallet.key": "wallet_key"}, + settings={ + "wallet.name": "wallet_name", + "wallet.key": "wallet_key", + "admin.admin_api_key": "secret-key", + }, ) formatted = test_module.format_wallet_record(wallet_record) diff --git a/aries_cloudagent/multitenant/manager.py b/aries_cloudagent/multitenant/manager.py index 550389f0db..4f1cf89134 100644 --- a/aries_cloudagent/multitenant/manager.py +++ b/aries_cloudagent/multitenant/manager.py @@ -3,6 +3,7 @@ import logging from typing import Iterable, Optional +from ..askar.profile_anon import AskarAnoncredsProfile from ..config.injection_context import InjectionContext from ..config.wallet import wallet_config from ..core.profile import Profile @@ -84,6 +85,13 @@ async def get_wallet_profile( profile, _ = await wallet_config(context, provision=provision) self._profiles.put(wallet_id, profile) + # return anoncreds profile if explicitly set as wallet type + if profile.context.settings.get("wallet.type") == "askar-anoncreds": + return AskarAnoncredsProfile( + profile.opened, + profile.context, + ) + return profile async def update_wallet(self, wallet_id: str, new_settings: dict) -> WalletRecord: diff --git a/aries_cloudagent/protocols/actionmenu/v1_0/routes.py b/aries_cloudagent/protocols/actionmenu/v1_0/routes.py index c9c94af9d6..802fa75ebf 100644 --- a/aries_cloudagent/protocols/actionmenu/v1_0/routes.py +++ b/aries_cloudagent/protocols/actionmenu/v1_0/routes.py @@ -4,9 +4,9 @@ from aiohttp import web from aiohttp_apispec import docs, match_info_schema, request_schema, response_schema - from marshmallow import fields +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....connections.models.conn_record import ConnRecord from 
....messaging.models.base import BaseModelError @@ -95,6 +95,7 @@ class ActionMenuFetchResultSchema(OpenAPISchema): ) @match_info_schema(MenuConnIdMatchInfoSchema()) @response_schema(ActionMenuModulesResultSchema(), 200, description="") +@tenant_authentication async def actionmenu_close(request: web.BaseRequest): """Request handler for closing the menu associated with a connection. @@ -122,6 +123,7 @@ async def actionmenu_close(request: web.BaseRequest): @docs(tags=["action-menu"], summary="Fetch the active menu") @match_info_schema(MenuConnIdMatchInfoSchema()) @response_schema(ActionMenuFetchResultSchema(), 200, description="") +@tenant_authentication async def actionmenu_fetch(request: web.BaseRequest): """Request handler for fetching the previously-received menu for a connection. @@ -141,6 +143,7 @@ @match_info_schema(MenuConnIdMatchInfoSchema()) @request_schema(PerformRequestSchema()) @response_schema(ActionMenuModulesResultSchema(), 200, description="") +@tenant_authentication async def actionmenu_perform(request: web.BaseRequest): """Request handler for performing a menu action. @@ -170,6 +173,7 @@ async def actionmenu_perform(request: web.BaseRequest): @docs(tags=["action-menu"], summary="Request the active menu") @match_info_schema(MenuConnIdMatchInfoSchema()) @response_schema(ActionMenuModulesResultSchema(), 200, description="") +@tenant_authentication async def actionmenu_request(request: web.BaseRequest): """Request handler for requesting a menu from the connection target. @@ -200,6 +204,7 @@ @match_info_schema(MenuConnIdMatchInfoSchema()) @request_schema(SendMenuSchema()) @response_schema(ActionMenuModulesResultSchema(), 200, description="") +@tenant_authentication async def actionmenu_send(request: web.BaseRequest): """Request handler for sending a menu to the connection target. diff --git a/aries_cloudagent/protocols/actionmenu/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/actionmenu/v1_0/tests/test_routes.py index 0d157842e8..31a1e00d85 100644 --- a/aries_cloudagent/protocols/actionmenu/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/actionmenu/v1_0/tests/test_routes.py @@ -1,16 +1,22 @@ from unittest import IsolatedAsyncioTestCase + from aries_cloudagent.tests import mock from .....admin.request_context import AdminRequestContext +from .....core.in_memory import InMemoryProfile from .....storage.error import StorageNotFoundError - from .. 
import routes as test_module class TestActionMenuRoutes(IsolatedAsyncioTestCase): def setUp(self): self.session_inject = {} - self.context = AdminRequestContext.test_context(self.session_inject) + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(self.session_inject, profile) self.request_dict = { "context": self.context, "outbound_message_router": mock.CoroutineMock(), @@ -20,6 +26,7 @@ def setUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_actionmenu_close(self): diff --git a/aries_cloudagent/protocols/basicmessage/v1_0/routes.py b/aries_cloudagent/protocols/basicmessage/v1_0/routes.py index 7fffa930e1..015318eb5f 100644 --- a/aries_cloudagent/protocols/basicmessage/v1_0/routes.py +++ b/aries_cloudagent/protocols/basicmessage/v1_0/routes.py @@ -2,9 +2,9 @@ from aiohttp import web from aiohttp_apispec import docs, match_info_schema, request_schema, response_schema - from marshmallow import fields +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....connections.models.conn_record import ConnRecord from ....messaging.models.openapi import OpenAPISchema @@ -39,6 +39,7 @@ class BasicConnIdMatchInfoSchema(OpenAPISchema): @match_info_schema(BasicConnIdMatchInfoSchema()) @request_schema(SendMessageSchema()) @response_schema(BasicMessageModuleResponseSchema(), 200, description="") +@tenant_authentication async def connections_send_message(request: web.BaseRequest): """Request handler for sending a basic message to a connection. diff --git a/aries_cloudagent/protocols/basicmessage/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/basicmessage/v1_0/tests/test_routes.py index de3373f053..7d6c5b069c 100644 --- a/aries_cloudagent/protocols/basicmessage/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/basicmessage/v1_0/tests/test_routes.py @@ -1,16 +1,22 @@ from unittest import IsolatedAsyncioTestCase + from aries_cloudagent.tests import mock from .....admin.request_context import AdminRequestContext +from .....core.in_memory import InMemoryProfile from .....storage.error import StorageNotFoundError - from .. 
import routes as test_module class TestBasicMessageRoutes(IsolatedAsyncioTestCase): async def asyncSetUp(self): self.session_inject = {} - self.context = AdminRequestContext.test_context(self.session_inject) + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(self.session_inject, profile) self.request_dict = { "context": self.context, "outbound_message_router": mock.CoroutineMock(), @@ -20,6 +26,7 @@ async def asyncSetUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) self.test_conn_id = "connection-id" diff --git a/aries_cloudagent/protocols/connections/v1_0/routes.py b/aries_cloudagent/protocols/connections/v1_0/routes.py index 5d3a0c6e66..e067b547af 100644 --- a/aries_cloudagent/protocols/connections/v1_0/routes.py +++ b/aries_cloudagent/protocols/connections/v1_0/routes.py @@ -11,9 +11,9 @@ request_schema, response_schema, ) - from marshmallow import fields, validate, validates_schema +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....cache.base import BaseCache from ....connections.models.conn_record import ConnRecord, ConnRecordSchema @@ -22,13 +22,13 @@ from ....messaging.valid import ( ENDPOINT_EXAMPLE, ENDPOINT_VALIDATE, + GENERIC_DID_VALIDATE, INDY_DID_EXAMPLE, INDY_DID_VALIDATE, INDY_RAW_PUBLIC_KEY_EXAMPLE, INDY_RAW_PUBLIC_KEY_VALIDATE, UUID4_EXAMPLE, UUID4_VALIDATE, - GENERIC_DID_VALIDATE, ) from ....storage.error import StorageError, StorageNotFoundError from ....wallet.error import WalletError @@ -430,6 +430,7 @@ def connection_sort_key(conn): ) @querystring_schema(ConnectionsListQueryStringSchema()) @response_schema(ConnectionListSchema(), 200, description="") +@tenant_authentication async def connections_list(request: web.BaseRequest): """Request handler for searching connection records. @@ -484,6 +485,7 @@ async def connections_list(request: web.BaseRequest): @docs(tags=["connection"], summary="Fetch a single connection record") @match_info_schema(ConnectionsConnIdMatchInfoSchema()) @response_schema(ConnRecordSchema(), 200, description="") +@tenant_authentication async def connections_retrieve(request: web.BaseRequest): """Request handler for fetching a single connection record. @@ -513,6 +515,7 @@ async def connections_retrieve(request: web.BaseRequest): @docs(tags=["connection"], summary="Fetch connection remote endpoint") @match_info_schema(ConnectionsConnIdMatchInfoSchema()) @response_schema(EndpointsResultSchema(), 200, description="") +@tenant_authentication async def connections_endpoints(request: web.BaseRequest): """Request handler for fetching connection endpoints. 
@@ -542,6 +545,7 @@ async def connections_endpoints(request: web.BaseRequest): @match_info_schema(ConnectionsConnIdMatchInfoSchema()) @querystring_schema(ConnectionMetadataQuerySchema()) @response_schema(ConnectionMetadataSchema(), 200, description="") +@tenant_authentication async def connections_metadata(request: web.BaseRequest): """Handle fetching metadata associated with a single connection record.""" context: AdminRequestContext = request["context"] @@ -568,6 +572,7 @@ async def connections_metadata(request: web.BaseRequest): @match_info_schema(ConnectionsConnIdMatchInfoSchema()) @request_schema(ConnectionMetadataSetRequestSchema()) @response_schema(ConnectionMetadataSchema(), 200, description="") +@tenant_authentication async def connections_metadata_set(request: web.BaseRequest): """Handle setting metadata associated with a single connection record.""" context: AdminRequestContext = request["context"] @@ -597,6 +602,7 @@ @querystring_schema(CreateInvitationQueryStringSchema()) @request_schema(CreateInvitationRequestSchema()) @response_schema(InvitationResultSchema(), 200, description="") +@tenant_authentication async def connections_create_invitation(request: web.BaseRequest): """Request handler for creating a new connection invitation. @@ -671,6 +677,7 @@ async def connections_create_invitation(request: web.BaseRequest): @querystring_schema(ReceiveInvitationQueryStringSchema()) @request_schema(ReceiveInvitationRequestSchema()) @response_schema(ConnRecordSchema(), 200, description="") +@tenant_authentication async def connections_receive_invitation(request: web.BaseRequest): """Request handler for receiving a new connection invitation. @@ -713,6 +720,7 @@ async def connections_receive_invitation(request: web.BaseRequest): @match_info_schema(ConnectionsConnIdMatchInfoSchema()) @querystring_schema(AcceptInvitationQueryStringSchema()) @response_schema(ConnRecordSchema(), 200, description="") +@tenant_authentication async def connections_accept_invitation(request: web.BaseRequest): """Request handler for accepting a stored connection invitation. @@ -764,6 +772,7 @@ async def connections_accept_invitation(request: web.BaseRequest): @match_info_schema(ConnectionsConnIdMatchInfoSchema()) @querystring_schema(AcceptRequestQueryStringSchema()) @response_schema(ConnRecordSchema(), 200, description="") +@tenant_authentication async def connections_accept_request(request: web.BaseRequest): """Request handler for accepting a stored connection request. @@ -798,6 +807,7 @@ async def connections_accept_request(request: web.BaseRequest): @docs(tags=["connection"], summary="Remove an existing connection record") @match_info_schema(ConnectionsConnIdMatchInfoSchema()) @response_schema(ConnectionModuleResponseSchema, 200, description="") +@tenant_authentication async def connections_remove(request: web.BaseRequest): """Request handler for removing a connection record. @@ -826,6 +836,7 @@ async def connections_remove(request: web.BaseRequest): @docs(tags=["connection"], summary="Create a new static connection") @request_schema(ConnectionStaticRequestSchema()) @response_schema(ConnectionStaticResultSchema(), 200, description="") +@tenant_authentication async def connections_create_static(request: web.BaseRequest): """Request handler for creating a new static connection. 
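Every test suite touched by this patch follows the same recipe: build an InMemoryProfile with the API key in its settings, wrap it in an AdminRequestContext, and hand the mocked request a matching x-api-key header. Condensed, the setUp pattern used throughout amounts to the following (mirroring the diffs above; aries_cloudagent.tests.mock supplies the CoroutineMock helper these tests rely on):

from aries_cloudagent.tests import mock

from aries_cloudagent.admin.request_context import AdminRequestContext
from aries_cloudagent.core.in_memory import InMemoryProfile

profile = InMemoryProfile.test_profile(
    settings={"admin.admin_api_key": "secret-key"},
)
context = AdminRequestContext.test_context({}, profile)
request_dict = {
    "context": context,
    "outbound_message_router": mock.CoroutineMock(),
}
request = mock.MagicMock(
    app={},
    match_info={},
    query={},
    __getitem__=lambda _, k: request_dict[k],
    # Must match the profile's admin.admin_api_key, otherwise the
    # decorated handler is expected to answer 401.
    headers={"x-api-key": "secret-key"},
)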
diff --git a/aries_cloudagent/protocols/connections/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/connections/v1_0/tests/test_routes.py index d880f17e59..d561e8f0a0 100644 --- a/aries_cloudagent/protocols/connections/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/connections/v1_0/tests/test_routes.py @@ -1,22 +1,27 @@ import json - -from unittest.mock import ANY from unittest import IsolatedAsyncioTestCase +from unittest.mock import ANY + from aries_cloudagent.tests import mock from .....admin.request_context import AdminRequestContext from .....cache.base import BaseCache from .....cache.in_memory import InMemoryCache from .....connections.models.conn_record import ConnRecord +from .....core.in_memory import InMemoryProfile from .....storage.error import StorageNotFoundError - from .. import routes as test_module class TestConnectionRoutes(IsolatedAsyncioTestCase): async def asyncSetUp(self): self.session_inject = {} - self.context = AdminRequestContext.test_context(self.session_inject) + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(self.session_inject, profile) self.request_dict = { "context": self.context, "outbound_message_router": mock.CoroutineMock(), @@ -26,6 +31,7 @@ async def asyncSetUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_connections_list(self): diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/routes.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/routes.py index dd4e100081..8d8c97231e 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/routes.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/routes.py @@ -8,9 +8,9 @@ request_schema, response_schema, ) - from marshmallow import fields, validate +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....connections.models.conn_record import ConnRecord from ....messaging.models.base import BaseModelError @@ -169,6 +169,7 @@ def mediation_sort_key(mediation: dict): ) @querystring_schema(MediationListQueryStringSchema()) @response_schema(MediationListSchema(), 200) +@tenant_authentication async def list_mediation_requests(request: web.BaseRequest): """List mediation requests for either client or server role.""" context: AdminRequestContext = request["context"] @@ -194,6 +195,7 @@ async def list_mediation_requests(request: web.BaseRequest): @docs(tags=["mediation"], summary="Retrieve mediation request record") @match_info_schema(MediationIdMatchInfoSchema()) @response_schema(MediationRecordSchema(), 200) +@tenant_authentication async def retrieve_mediation_request(request: web.BaseRequest): """Retrieve a single mediation request.""" context: AdminRequestContext = request["context"] @@ -216,6 +218,7 @@ async def retrieve_mediation_request(request: web.BaseRequest): @docs(tags=["mediation"], summary="Delete mediation request by ID") @match_info_schema(MediationIdMatchInfoSchema()) @response_schema(MediationRecordSchema, 200) +@tenant_authentication async def delete_mediation_request(request: web.BaseRequest): """Delete a mediation request by ID.""" context: AdminRequestContext = request["context"] @@ -241,6 +244,7 @@ async def delete_mediation_request(request: web.BaseRequest): @match_info_schema(ConnectionsConnIdMatchInfoSchema()) @request_schema(MediationCreateRequestSchema()) 
@response_schema(MediationRecordSchema(), 201) +@tenant_authentication async def request_mediation(request: web.BaseRequest): """Request mediation from connection.""" context: AdminRequestContext = request["context"] @@ -280,6 +284,7 @@ async def request_mediation(request: web.BaseRequest): @docs(tags=["mediation"], summary="Grant received mediation") @match_info_schema(MediationIdMatchInfoSchema()) @response_schema(MediationGrantSchema(), 201) +@tenant_authentication async def mediation_request_grant(request: web.BaseRequest): """Grant a stored mediation request.""" context: AdminRequestContext = request["context"] @@ -303,6 +308,7 @@ async def mediation_request_grant(request: web.BaseRequest): @match_info_schema(MediationIdMatchInfoSchema()) @request_schema(AdminMediationDenySchema()) @response_schema(MediationDenySchema(), 201) +@tenant_authentication async def mediation_request_deny(request: web.BaseRequest): """Deny a stored mediation request.""" context: AdminRequestContext = request["context"] @@ -329,6 +335,7 @@ async def mediation_request_deny(request: web.BaseRequest): ) @querystring_schema(GetKeylistQuerySchema()) @response_schema(KeylistSchema(), 200) +@tenant_authentication async def get_keylist(request: web.BaseRequest): """Retrieve keylists by connection or role.""" context: AdminRequestContext = request["context"] @@ -358,6 +365,7 @@ async def get_keylist(request: web.BaseRequest): @querystring_schema(KeylistQueryPaginateQuerySchema()) @request_schema(KeylistQueryFilterRequestSchema()) @response_schema(KeylistQuerySchema(), 201) +@tenant_authentication async def send_keylist_query(request: web.BaseRequest): """Send keylist query to mediator.""" context: AdminRequestContext = request["context"] @@ -394,6 +402,7 @@ async def send_keylist_query(request: web.BaseRequest): @match_info_schema(MediationIdMatchInfoSchema()) @request_schema(KeylistUpdateRequestSchema()) @response_schema(KeylistUpdateSchema(), 201) +@tenant_authentication async def send_keylist_update(request: web.BaseRequest): """Send keylist update to mediator.""" context: AdminRequestContext = request["context"] @@ -439,6 +448,7 @@ async def send_keylist_update(request: web.BaseRequest): @docs(tags=["mediation"], summary="Get default mediator") @response_schema(MediationRecordSchema(), 200) +@tenant_authentication async def get_default_mediator(request: web.BaseRequest): """Get default mediator.""" context: AdminRequestContext = request["context"] @@ -455,6 +465,7 @@ async def get_default_mediator(request: web.BaseRequest): @docs(tags=["mediation"], summary="Set default mediator") @match_info_schema(MediationIdMatchInfoSchema()) @response_schema(MediationRecordSchema(), 201) +@tenant_authentication async def set_default_mediator(request: web.BaseRequest): """Set default mediator.""" context: AdminRequestContext = request["context"] @@ -471,6 +482,7 @@ async def set_default_mediator(request: web.BaseRequest): @docs(tags=["mediation"], summary="Clear default mediator") @response_schema(MediationRecordSchema(), 201) +@tenant_authentication async def clear_default_mediator(request: web.BaseRequest): """Clear set default mediator.""" context: AdminRequestContext = request["context"] @@ -489,6 +501,7 @@ async def clear_default_mediator(request: web.BaseRequest): @request_schema(MediationIdMatchInfoSchema()) # TODO Fix this response so that it adequately represents Optionals @response_schema(KeylistUpdateSchema(), 200) +@tenant_authentication async def update_keylist_for_connection(request: web.BaseRequest): """Update 
keylist for a connection.""" context: AdminRequestContext = request["context"] diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_routes.py index bdf9911c32..2b3ecd3c04 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_routes.py @@ -1,18 +1,23 @@ -from aries_cloudagent.tests import mock from unittest import IsolatedAsyncioTestCase -from .. import routes as test_module +from aries_cloudagent.tests import mock + from .....admin.request_context import AdminRequestContext from .....core.in_memory import InMemoryProfile from .....storage.error import StorageError, StorageNotFoundError +from .....wallet.did_method import DIDMethods +from .. import routes as test_module from ..models.mediation_record import MediationRecord from ..route_manager import RouteManager -from .....wallet.did_method import DIDMethods class TestCoordinateMediationRoutes(IsolatedAsyncioTestCase): def setUp(self): - self.profile = InMemoryProfile.test_profile() + self.profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) self.profile.context.injector.bind_instance(DIDMethods, DIDMethods()) self.context = AdminRequestContext.test_context(profile=self.profile) self.outbound_message_router = mock.CoroutineMock() @@ -28,6 +33,7 @@ def setUp(self): query={}, json=mock.CoroutineMock(return_value={}), __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) serialized = { "mediation_id": "fake_id", diff --git a/aries_cloudagent/protocols/did_rotate/v1_0/messages/tests/test_rotate.py b/aries_cloudagent/protocols/did_rotate/v1_0/messages/tests/test_rotate.py index 66e1558fce..b26c65785c 100644 --- a/aries_cloudagent/protocols/did_rotate/v1_0/messages/tests/test_rotate.py +++ b/aries_cloudagent/protocols/did_rotate/v1_0/messages/tests/test_rotate.py @@ -8,7 +8,6 @@ class TestRotate(TestCase): - def test_init_type(self): """Test initializer.""" diff --git a/aries_cloudagent/protocols/did_rotate/v1_0/routes.py b/aries_cloudagent/protocols/did_rotate/v1_0/routes.py index be72612b7f..f441ded27b 100644 --- a/aries_cloudagent/protocols/did_rotate/v1_0/routes.py +++ b/aries_cloudagent/protocols/did_rotate/v1_0/routes.py @@ -6,6 +6,7 @@ from aiohttp_apispec import docs, json_schema, match_info_schema, response_schema from marshmallow import fields +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....connections.models.conn_record import ConnRecord from ....messaging.models.openapi import OpenAPISchema @@ -46,6 +47,7 @@ class DIDRotateRequestJSONSchema(OpenAPISchema): @response_schema( RotateMessageSchema(), 200, description="Rotate agent message for observer" ) +@tenant_authentication async def rotate(request: web.BaseRequest): """Request to rotate a DID.""" @@ -77,6 +79,7 @@ async def rotate(request: web.BaseRequest): @response_schema( HangupMessageSchema(), 200, description="Hangup agent message for observer" ) +@tenant_authentication async def hangup(request: web.BaseRequest): """Hangup a DID rotation.""" diff --git a/aries_cloudagent/protocols/did_rotate/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/did_rotate/v1_0/tests/test_routes.py index a4b68f08c4..f596005cf3 100644 --- a/aries_cloudagent/protocols/did_rotate/v1_0/tests/test_routes.py +++ 
b/aries_cloudagent/protocols/did_rotate/v1_0/tests/test_routes.py @@ -2,12 +2,13 @@ from unittest import IsolatedAsyncioTestCase from .....admin.request_context import AdminRequestContext +from .....core.in_memory import InMemoryProfile from .....protocols.didcomm_prefix import DIDCommPrefix from .....storage.error import StorageNotFoundError from .....tests import mock -from ..messages import Hangup, Rotate from .. import message_types as test_message_types from .. import routes as test_module +from ..messages import Hangup, Rotate from ..tests import MockConnRecord, test_conn_id test_valid_rotate_request = { @@ -28,8 +29,12 @@ def generate_mock_rotate_message(): class TestDIDRotateRoutes(IsolatedAsyncioTestCase): async def asyncSetUp(self): self.session_inject = {} - - self.context = AdminRequestContext.test_context(self.session_inject) + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(self.session_inject, profile) self.request_dict = { "context": self.context, "outbound_message_router": mock.CoroutineMock(), @@ -39,6 +44,7 @@ async def asyncSetUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) @mock.patch.object( @@ -107,7 +113,6 @@ async def test_rotate_conn_not_found(self): "retrieve_by_id", mock.CoroutineMock(side_effect=StorageNotFoundError()), ) as mock_retrieve_by_id: - with self.assertRaises(test_module.web.HTTPNotFound): await test_module.rotate(self.request) diff --git a/aries_cloudagent/protocols/didexchange/v1_0/routes.py b/aries_cloudagent/protocols/didexchange/v1_0/routes.py index 7826aacbd4..f9c21e8266 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/routes.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/routes.py @@ -12,6 +12,7 @@ ) from marshmallow import fields, validate +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....connections.models.conn_record import ConnRecord, ConnRecordSchema from ....messaging.models.base import BaseModelError @@ -238,7 +239,8 @@ class DIDXRejectRequestSchema(OpenAPISchema): @match_info_schema(DIDXConnIdMatchInfoSchema()) @querystring_schema(DIDXAcceptInvitationQueryStringSchema()) @response_schema(ConnRecordSchema(), 200, description="") -async def didx_accept_invitation(request: web.Request): +@tenant_authentication +async def didx_accept_invitation(request: web.BaseRequest): """Request handler for accepting a stored connection invitation. Args: @@ -300,6 +302,7 @@ async def didx_accept_invitation(request: web.Request): ) @querystring_schema(DIDXCreateRequestImplicitQueryStringSchema()) @response_schema(ConnRecordSchema(), 200, description="") +@tenant_authentication async def didx_create_request_implicit(request: web.BaseRequest): """Request handler for creating and sending a request to an implicit invitation. @@ -358,6 +361,7 @@ async def didx_create_request_implicit(request: web.BaseRequest): @querystring_schema(DIDXReceiveRequestImplicitQueryStringSchema()) @request_schema(DIDXRequestSchema()) @response_schema(ConnRecordSchema(), 200, description="") +@tenant_authentication async def didx_receive_request_implicit(request: web.BaseRequest): """Request handler for receiving a request against public DID's implicit invitation. 
@@ -400,6 +404,7 @@ async def didx_receive_request_implicit(request: web.BaseRequest): @match_info_schema(DIDXConnIdMatchInfoSchema()) @querystring_schema(DIDXAcceptRequestQueryStringSchema()) @response_schema(ConnRecordSchema(), 200, description="") +@tenant_authentication async def didx_accept_request(request: web.BaseRequest): """Request handler for accepting a stored connection request. @@ -445,6 +450,7 @@ async def didx_accept_request(request: web.BaseRequest): @match_info_schema(DIDXConnIdMatchInfoSchema()) @request_schema(DIDXRejectRequestSchema()) @response_schema(ConnRecordSchema(), 200, description="") +@tenant_authentication async def didx_reject(request: web.BaseRequest): """Abandon or reject a DID Exchange.""" context: AdminRequestContext = request["context"] diff --git a/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py index f3c5d6d969..405f912a35 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py @@ -99,7 +99,7 @@ async def asyncSetUp(self): "debug.auto_accept_invites": True, "debug.auto_accept_requests": True, "multitenant.enabled": True, - "wallet.id": True, + "wallet.id": "test-wallet-id", }, bind={ BaseResponder: self.responder, diff --git a/aries_cloudagent/protocols/didexchange/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/didexchange/v1_0/tests/test_routes.py index 2888c91166..a06edc2bcb 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/tests/test_routes.py @@ -1,16 +1,23 @@ from unittest import IsolatedAsyncioTestCase + from aries_cloudagent.tests import mock -from .. import routes as test_module from .....admin.request_context import AdminRequestContext +from .....core.in_memory import InMemoryProfile from .....storage.error import StorageNotFoundError from ....coordinate_mediation.v1_0.route_manager import RouteManager +from .. 
import routes as test_module class TestDIDExchangeConnRoutes(IsolatedAsyncioTestCase): async def asyncSetUp(self): self.session_inject = {} - self.context = AdminRequestContext.test_context(self.session_inject) + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(self.session_inject, profile) self.profile = self.context.profile self.request_dict = { "context": self.context, @@ -21,6 +28,7 @@ async def asyncSetUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) self.profile.context.injector.bind_instance(RouteManager, mock.MagicMock()) diff --git a/aries_cloudagent/protocols/discovery/v1_0/routes.py b/aries_cloudagent/protocols/discovery/v1_0/routes.py index f9d282b4cc..04416b8045 100644 --- a/aries_cloudagent/protocols/discovery/v1_0/routes.py +++ b/aries_cloudagent/protocols/discovery/v1_0/routes.py @@ -2,9 +2,9 @@ from aiohttp import web from aiohttp_apispec import docs, querystring_schema, response_schema - from marshmallow import fields +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....messaging.models.base import BaseModelError from ....messaging.models.openapi import OpenAPISchema @@ -66,6 +66,7 @@ class QueryDiscoveryExchRecordsSchema(OpenAPISchema): ) @querystring_schema(QueryFeaturesQueryStringSchema()) @response_schema(V10DiscoveryRecordSchema(), 200, description="") +@tenant_authentication async def query_features(request: web.BaseRequest): """Request handler for creating and sending feature query. @@ -96,6 +97,7 @@ async def query_features(request: web.BaseRequest): ) @querystring_schema(QueryDiscoveryExchRecordsSchema()) @response_schema(V10DiscoveryExchangeListResultSchema(), 200, description="") +@tenant_authentication async def query_records(request: web.BaseRequest): """Request handler for looking up V10DiscoveryExchangeRecord. diff --git a/aries_cloudagent/protocols/discovery/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/discovery/v1_0/tests/test_routes.py index ce4a4a34e6..4af4af5a8c 100644 --- a/aries_cloudagent/protocols/discovery/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/discovery/v1_0/tests/test_routes.py @@ -1,10 +1,10 @@ from unittest import IsolatedAsyncioTestCase -from aries_cloudagent.tests import mock +from aries_cloudagent.tests import mock from .....admin.request_context import AdminRequestContext +from .....core.in_memory import InMemoryProfile from .....storage.error import StorageError - from .. 
import routes as test_module from ..manager import V10DiscoveryMgr from ..messages.query import Query @@ -14,7 +14,12 @@ class TestDiscoveryRoutes(IsolatedAsyncioTestCase): async def asyncSetUp(self): self.session_inject = {} - self.context = AdminRequestContext.test_context(self.session_inject) + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(self.session_inject, profile) self.profile = self.context.profile self.request_dict = { "context": self.context, @@ -25,6 +30,7 @@ async def asyncSetUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_query_features(self): diff --git a/aries_cloudagent/protocols/discovery/v2_0/routes.py b/aries_cloudagent/protocols/discovery/v2_0/routes.py index aeac69a424..bf2adb78b0 100644 --- a/aries_cloudagent/protocols/discovery/v2_0/routes.py +++ b/aries_cloudagent/protocols/discovery/v2_0/routes.py @@ -2,9 +2,9 @@ from aiohttp import web from aiohttp_apispec import docs, querystring_schema, response_schema - from marshmallow import fields +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....messaging.models.base import BaseModelError from ....messaging.models.openapi import OpenAPISchema @@ -76,6 +76,7 @@ class QueryDiscoveryExchRecordsSchema(OpenAPISchema): ) @querystring_schema(QueryFeaturesQueryStringSchema()) @response_schema(V20DiscoveryExchangeResultSchema(), 200, description="") +@tenant_authentication async def query_features(request: web.BaseRequest): """Request handler for creating and sending feature queries. @@ -106,6 +107,7 @@ async def query_features(request: web.BaseRequest): ) @querystring_schema(QueryDiscoveryExchRecordsSchema()) @response_schema(V20DiscoveryExchangeListResultSchema(), 200, description="") +@tenant_authentication async def query_records(request: web.BaseRequest): """Request handler for looking up V20DiscoveryExchangeRecord. diff --git a/aries_cloudagent/protocols/discovery/v2_0/tests/test_routes.py b/aries_cloudagent/protocols/discovery/v2_0/tests/test_routes.py index d6c5ecd2f6..bcd542227e 100644 --- a/aries_cloudagent/protocols/discovery/v2_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/discovery/v2_0/tests/test_routes.py @@ -1,10 +1,10 @@ from unittest import IsolatedAsyncioTestCase -from aries_cloudagent.tests import mock +from aries_cloudagent.tests import mock from .....admin.request_context import AdminRequestContext +from .....core.in_memory import InMemoryProfile from .....storage.error import StorageError - from .. 
import routes as test_module from ..manager import V20DiscoveryMgr from ..messages.queries import Queries, QueryItem @@ -14,7 +14,12 @@ class TestDiscoveryRoutes(IsolatedAsyncioTestCase): async def asyncSetUp(self): self.session_inject = {} - self.context = AdminRequestContext.test_context(self.session_inject) + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(self.session_inject, profile) self.profile = self.context.profile self.request_dict = { "context": self.context, @@ -25,6 +30,7 @@ async def asyncSetUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_query_features(self): diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/manager.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/manager.py index a96a2e8fe9..9fca6aec4b 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/manager.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/manager.py @@ -2,9 +2,10 @@ import json import logging -import uuid from asyncio import shield +from uuid_utils import uuid4 + from ....anoncreds.issuer import AnonCredsIssuer from ....anoncreds.revocation import AnonCredsRevocation from ....connections.models.conn_record import ConnRecord @@ -22,10 +23,7 @@ from ....storage.error import StorageError, StorageNotFoundError from ....transport.inbound.receipt import MessageReceipt from ....wallet.base import BaseWallet -from ....wallet.util import ( - notify_endorse_did_attrib_event, - notify_endorse_did_event, -) +from ....wallet.util import notify_endorse_did_attrib_event, notify_endorse_did_event from .messages.cancel_transaction import CancelTransaction from .messages.endorsed_transaction_response import EndorsedTransactionResponse from .messages.refused_transaction_response import RefusedTransactionResponse @@ -78,7 +76,7 @@ async def create_record( """ messages_attach_dict = { - "@id": str(uuid.uuid4()), + "@id": str(uuid4()), "mime-type": "application/json", "data": {"json": messages_attach}, } @@ -415,6 +413,9 @@ async def complete_transaction( if (not endorser) and ( txn_goal_code != TransactionRecord.WRITE_DID_TRANSACTION ): + ledger = self.profile.inject(BaseLedger) + if not ledger: + raise TransactionManagerError("No ledger available") if ( self._profile.context.settings.get_value("wallet.type") == "askar-anoncreds" @@ -425,13 +426,9 @@ async def complete_transaction( legacy_indy_registry = LegacyIndyRegistry() ledger_response_json = await legacy_indy_registry.txn_submit( - self._profile, ledger_transaction, sign=False, taa_accept=False + ledger, ledger_transaction, sign=False, taa_accept=False ) else: - ledger = self.profile.inject(BaseLedger) - if not ledger: - raise TransactionManagerError("No ledger available") - async with ledger: try: ledger_response_json = await shield( diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py index 5631c161e2..f4ab0f2ebc 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py @@ -12,6 +12,7 @@ ) from marshmallow import fields, validate +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....connections.models.conn_record import ConnRecord from ....core.event_bus import Event, EventBus @@ -124,6 
+125,7 @@ class EndorserInfoSchema(OpenAPISchema): ) @querystring_schema(TransactionsListQueryStringSchema()) @response_schema(TransactionListSchema(), 200) +@tenant_authentication async def transactions_list(request: web.BaseRequest): """Request handler for searching transaction records. @@ -153,6 +155,7 @@ async def transactions_list(request: web.BaseRequest): @docs(tags=["endorse-transaction"], summary="Fetch a single transaction record") @match_info_schema(TranIdMatchInfoSchema()) @response_schema(TransactionRecordSchema(), 200) +@tenant_authentication async def transactions_retrieve(request: web.BaseRequest): """Request handler for fetching a single transaction record. @@ -186,6 +189,7 @@ async def transactions_retrieve(request: web.BaseRequest): @querystring_schema(TranIdMatchInfoSchema()) @request_schema(DateSchema()) @response_schema(TransactionRecordSchema(), 200) +@tenant_authentication async def transaction_create_request(request: web.BaseRequest): """Request handler for creating a new transaction record and request. @@ -276,6 +280,7 @@ async def transaction_create_request(request: web.BaseRequest): @querystring_schema(EndorserDIDInfoSchema()) @match_info_schema(TranIdMatchInfoSchema()) @response_schema(TransactionRecordSchema(), 200) +@tenant_authentication async def endorse_transaction_response(request: web.BaseRequest): """Request handler for creating an endorsed transaction response. @@ -347,6 +352,7 @@ async def endorse_transaction_response(request: web.BaseRequest): ) @match_info_schema(TranIdMatchInfoSchema()) @response_schema(TransactionRecordSchema(), 200) +@tenant_authentication async def refuse_transaction_response(request: web.BaseRequest): """Request handler for creating a refused transaction response. @@ -413,6 +419,7 @@ async def refuse_transaction_response(request: web.BaseRequest): ) @match_info_schema(TranIdMatchInfoSchema()) @response_schema(TransactionRecordSchema(), 200) +@tenant_authentication async def cancel_transaction(request: web.BaseRequest): """Request handler for cancelling a Transaction request. @@ -477,6 +484,7 @@ async def cancel_transaction(request: web.BaseRequest): ) @match_info_schema(TranIdMatchInfoSchema()) @response_schema(TransactionRecordSchema(), 200) +@tenant_authentication async def transaction_resend(request: web.BaseRequest): """Request handler for resending a transaction request. @@ -541,6 +549,7 @@ async def transaction_resend(request: web.BaseRequest): @querystring_schema(AssignTransactionJobsSchema()) @match_info_schema(TransactionConnIdMatchInfoSchema()) @response_schema(TransactionJobsSchema(), 200) +@tenant_authentication async def set_endorser_role(request: web.BaseRequest): """Request handler for assigning transaction jobs. @@ -581,6 +590,7 @@ async def set_endorser_role(request: web.BaseRequest): @querystring_schema(EndorserInfoSchema()) @match_info_schema(TransactionConnIdMatchInfoSchema()) @response_schema(EndorserInfoSchema(), 200) +@tenant_authentication async def set_endorser_info(request: web.BaseRequest): """Request handler for assigning endorser information. @@ -644,6 +654,7 @@ async def set_endorser_info(request: web.BaseRequest): ) @match_info_schema(TranIdMatchInfoSchema()) @response_schema(TransactionRecordSchema(), 200) +@tenant_authentication async def transaction_write(request: web.BaseRequest): """Request handler for writing an endorsed transaction to the ledger. 
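Beyond the decorators, the endorse_transaction manager change above reorders complete_transaction so that the BaseLedger is injected and checked once, before either submit path runs, and the anoncreds branch now hands LegacyIndyRegistry.txn_submit the ledger rather than the profile. A simplified sketch of the resulting flow (error handling and the DID-transaction special case are trimmed, and the shield() payload is a reconstruction of the surrounding code):

from asyncio import shield

from aries_cloudagent.anoncreds.default.legacy_indy.registry import LegacyIndyRegistry
from aries_cloudagent.ledger.base import BaseLedger
from aries_cloudagent.protocols.endorse_transaction.v1_0.manager import (
    TransactionManagerError,
)


async def submit_endorsed_txn(profile, ledger_transaction: str) -> str:
    # Fail fast if no ledger is bound, instead of failing inside a branch.
    ledger = profile.inject(BaseLedger)
    if not ledger:
        raise TransactionManagerError("No ledger available")

    if profile.context.settings.get_value("wallet.type") == "askar-anoncreds":
        # The registry helper now receives the ledger itself, not the profile.
        return await LegacyIndyRegistry().txn_submit(
            ledger, ledger_transaction, sign=False, taa_accept=False
        )

    async with ledger:
        return await shield(
            ledger.txn_submit(ledger_transaction, sign=False, taa_accept=False)
        )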
diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_manager.py index f2cbb7aa2d..87ea1ceff7 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_manager.py @@ -1,8 +1,9 @@ import asyncio import json -import uuid from unittest import IsolatedAsyncioTestCase +from uuid_utils import uuid4 + from .....admin.request_context import AdminRequestContext from .....anoncreds.default.legacy_indy.registry import LegacyIndyRegistry from .....anoncreds.issuer import AnonCredsIssuer @@ -278,7 +279,7 @@ async def test_receive_request(self): "author_goal_code": TransactionRecord.WRITE_TRANSACTION, } mock_request.messages_attach = { - "@id": str(uuid.uuid4()), + "@id": str(uuid4()), "mime-type": "application/json", "data": {"json": self.test_messages_attach}, } diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_routes.py index ad79131aa4..d924b93216 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_routes.py @@ -1,7 +1,7 @@ import asyncio import json - from unittest import IsolatedAsyncioTestCase + from aries_cloudagent.tests import mock from .....connections.models.conn_record import ConnRecord @@ -23,7 +23,11 @@ class TestEndorseTransactionRoutes(IsolatedAsyncioTestCase): async def asyncSetUp(self): - self.profile = InMemoryProfile.test_profile() + self.profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) self.context = self.profile.context setattr(self.context, "profile", self.profile) self.session = await self.profile.session() @@ -67,6 +71,7 @@ async def asyncSetUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) self.test_did = "sample-did" diff --git a/aries_cloudagent/protocols/introduction/v0_1/routes.py b/aries_cloudagent/protocols/introduction/v0_1/routes.py index ed1e9ea226..591b14811c 100644 --- a/aries_cloudagent/protocols/introduction/v0_1/routes.py +++ b/aries_cloudagent/protocols/introduction/v0_1/routes.py @@ -5,9 +5,9 @@ from aiohttp import web from aiohttp_apispec import docs, match_info_schema, querystring_schema, response_schema - from marshmallow import fields +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....messaging.models.openapi import OpenAPISchema from ....messaging.valid import UUID4_EXAMPLE @@ -53,6 +53,7 @@ class IntroConnIdMatchInfoSchema(OpenAPISchema): @match_info_schema(IntroConnIdMatchInfoSchema()) @querystring_schema(IntroStartQueryStringSchema()) @response_schema(IntroModuleResponseSchema, description="") +@tenant_authentication async def introduction_start(request: web.BaseRequest): """Request handler for starting an introduction. 
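Reviewer note: the one behavioral fix in this area (in `endorse_transaction/v1_0/manager.py`, `complete_transaction`) is easy to miss among the decorator churn: the `BaseLedger` injection and the "No ledger available" guard move ahead of the wallet-type branch, and `LegacyIndyRegistry.txn_submit` now receives the `ledger` rather than `self._profile`. Condensed from the hunk (not standalone code; the matching `txn_submit` signature change presumably lands elsewhere in this PR):

```python
ledger = self.profile.inject(BaseLedger)
if not ledger:
    raise TransactionManagerError("No ledger available")  # fail fast on both paths

if self._profile.context.settings.get_value("wallet.type") == "askar-anoncreds":
    ledger_response_json = await LegacyIndyRegistry().txn_submit(
        ledger, ledger_transaction, sign=False, taa_accept=False  # was self._profile
    )
else:
    async with ledger:
        ...  # unchanged shielded submit path
```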
diff --git a/aries_cloudagent/protocols/introduction/v0_1/tests/test_routes.py b/aries_cloudagent/protocols/introduction/v0_1/tests/test_routes.py index 9ace9b497a..aa5b64437d 100644 --- a/aries_cloudagent/protocols/introduction/v0_1/tests/test_routes.py +++ b/aries_cloudagent/protocols/introduction/v0_1/tests/test_routes.py @@ -1,15 +1,21 @@ -from aries_cloudagent.tests import mock from unittest import IsolatedAsyncioTestCase -from .....admin.request_context import AdminRequestContext +from aries_cloudagent.tests import mock +from .....admin.request_context import AdminRequestContext +from .....core.in_memory import InMemoryProfile from .. import routes as test_module class TestIntroductionRoutes(IsolatedAsyncioTestCase): async def asyncSetUp(self): self.session_inject = {} - self.context = AdminRequestContext.test_context(self.session_inject) + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(self.session_inject, profile) self.request_dict = { "context": self.context, "outbound_message_router": mock.CoroutineMock(), @@ -19,6 +25,7 @@ async def asyncSetUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_introduction_start_no_service(self): diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/routes.py b/aries_cloudagent/protocols/issue_credential/v1_0/routes.py index 3d3b68c3d9..e05e039bea 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/routes.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/routes.py @@ -12,6 +12,7 @@ ) from marshmallow import fields, validate +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....connections.models.conn_record import ConnRecord from ....core.profile import Profile @@ -381,6 +382,7 @@ class V10CredentialExchangeAutoRemoveRequestSchema(OpenAPISchema): ) @querystring_schema(V10CredentialExchangeListQueryStringSchema) @response_schema(V10CredentialExchangeListResultSchema(), 200, description="") +@tenant_authentication async def credential_exchange_list(request: web.BaseRequest): """Request handler for searching credential exchange records. @@ -422,6 +424,7 @@ async def credential_exchange_list(request: web.BaseRequest): ) @match_info_schema(CredExIdMatchInfoSchema()) @response_schema(V10CredentialExchangeSchema(), 200, description="") +@tenant_authentication async def credential_exchange_retrieve(request: web.BaseRequest): """Request handler for fetching single credential exchange record. @@ -469,6 +472,7 @@ async def credential_exchange_retrieve(request: web.BaseRequest): ) @request_schema(V10CredentialCreateSchema()) @response_schema(V10CredentialExchangeSchema(), 200, description="") +@tenant_authentication async def credential_exchange_create(request: web.BaseRequest): """Request handler for creating a credential from attr values. @@ -548,6 +552,7 @@ async def credential_exchange_create(request: web.BaseRequest): ) @request_schema(V10CredentialProposalRequestMandSchema()) @response_schema(V10CredentialExchangeSchema(), 200, description="") +@tenant_authentication async def credential_exchange_send(request: web.BaseRequest): """Request handler for sending credential from issuer to holder from attr values. 
@@ -650,6 +655,7 @@ async def credential_exchange_send(request: web.BaseRequest): ) @request_schema(V10CredentialProposalRequestOptSchema()) @response_schema(V10CredentialExchangeSchema(), 200, description="") +@tenant_authentication async def credential_exchange_send_proposal(request: web.BaseRequest): """Request handler for sending credential proposal. @@ -773,6 +779,7 @@ async def _create_free_offer( ) @request_schema(V10CredentialConnFreeOfferRequestSchema()) @response_schema(V10CredentialExchangeSchema(), 200, description="") +@tenant_authentication async def credential_exchange_create_free_offer(request: web.BaseRequest): """Request handler for creating free credential offer. @@ -847,6 +854,7 @@ async def credential_exchange_create_free_offer(request: web.BaseRequest): ) @request_schema(V10CredentialFreeOfferRequestSchema()) @response_schema(V10CredentialExchangeSchema(), 200, description="") +@tenant_authentication async def credential_exchange_send_free_offer(request: web.BaseRequest): """Request handler for sending free credential offer. @@ -937,6 +945,7 @@ async def credential_exchange_send_free_offer(request: web.BaseRequest): @match_info_schema(CredExIdMatchInfoSchema()) @request_schema(V10CredentialBoundOfferRequestSchema()) @response_schema(V10CredentialExchangeSchema(), 200, description="") +@tenant_authentication async def credential_exchange_send_bound_offer(request: web.BaseRequest): """Request handler for sending bound credential offer. @@ -1037,6 +1046,7 @@ async def credential_exchange_send_bound_offer(request: web.BaseRequest): @match_info_schema(CredExIdMatchInfoSchema()) @request_schema(V10CredentialExchangeAutoRemoveRequestSchema()) @response_schema(V10CredentialExchangeSchema(), 200, description="") +@tenant_authentication async def credential_exchange_send_request(request: web.BaseRequest): """Request handler for sending credential request. @@ -1153,6 +1163,7 @@ async def credential_exchange_send_request(request: web.BaseRequest): @match_info_schema(CredExIdMatchInfoSchema()) @request_schema(V10CredentialIssueRequestSchema()) @response_schema(V10CredentialExchangeSchema(), 200, description="") +@tenant_authentication async def credential_exchange_issue(request: web.BaseRequest): """Request handler for sending credential. @@ -1249,6 +1260,7 @@ async def credential_exchange_issue(request: web.BaseRequest): @match_info_schema(CredExIdMatchInfoSchema()) @request_schema(V10CredentialStoreRequestSchema()) @response_schema(V10CredentialExchangeSchema(), 200, description="") +@tenant_authentication async def credential_exchange_store(request: web.BaseRequest): """Request handler for storing credential. @@ -1354,6 +1366,7 @@ async def credential_exchange_store(request: web.BaseRequest): @match_info_schema(CredExIdMatchInfoSchema()) @request_schema(V10CredentialProblemReportRequestSchema()) @response_schema(IssueCredentialModuleResponseSchema(), 200, description="") +@tenant_authentication async def credential_exchange_problem_report(request: web.BaseRequest): """Request handler for sending problem report. @@ -1400,6 +1413,7 @@ async def credential_exchange_problem_report(request: web.BaseRequest): ) @match_info_schema(CredExIdMatchInfoSchema()) @response_schema(IssueCredentialModuleResponseSchema(), 200, description="") +@tenant_authentication async def credential_exchange_remove(request: web.BaseRequest): """Request handler for removing a credential exchange record. 
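Reviewer note: the corresponding test change, visible in the next file and repeated across every routes test in this patch, is mechanical: seed the in-memory profile with `admin.admin_api_key` and echo the same key from the mocked request's headers, otherwise the newly decorated handlers reject the call. Condensed shape of the fixture:

```python
from unittest import IsolatedAsyncioTestCase

from aries_cloudagent.admin.request_context import AdminRequestContext
from aries_cloudagent.core.in_memory import InMemoryProfile
from aries_cloudagent.tests import mock


class TestAuthedRoutes(IsolatedAsyncioTestCase):
    async def asyncSetUp(self):
        profile = InMemoryProfile.test_profile(
            settings={"admin.admin_api_key": "secret-key"}
        )
        self.context = AdminRequestContext.test_context({}, profile)
        self.request_dict = {"context": self.context}
        self.request = mock.MagicMock(
            app={},
            match_info={},
            query={},
            __getitem__=lambda _, k: self.request_dict[k],
            # Must match admin.admin_api_key or @tenant_authentication fails.
            headers={"x-api-key": "secret-key"},
        )
```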
diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_routes.py index e100a50a58..01c06e76a5 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_routes.py @@ -1,18 +1,23 @@ -from aries_cloudagent.tests import mock from unittest import IsolatedAsyncioTestCase +from aries_cloudagent.tests import mock + from .....admin.request_context import AdminRequestContext +from .....core.in_memory import InMemoryProfile from .....wallet.base import BaseWallet - from .. import routes as test_module - from . import CRED_DEF_ID class TestCredentialRoutes(IsolatedAsyncioTestCase): async def asyncSetUp(self): self.session_inject = {} - self.context = AdminRequestContext.test_context(self.session_inject) + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(self.session_inject, profile) self.request_dict = { "context": self.context, "outbound_message_router": mock.CoroutineMock(), @@ -22,6 +27,7 @@ async def asyncSetUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_credential_exchange_list(self): diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/models/cred_request.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/models/cred_request.py index 9c7c156391..7845b7676b 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/models/cred_request.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/models/cred_request.py @@ -6,7 +6,6 @@ INDY_CRED_DEF_ID_EXAMPLE, INDY_CRED_DEF_ID_VALIDATE, INDY_DID_EXAMPLE, - INDY_DID_VALIDATE, NUM_STR_WHOLE_EXAMPLE, NUM_STR_WHOLE_VALIDATE, ) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_format.py b/aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_format.py index a9d6253ce7..cb520ea123 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_format.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_format.py @@ -3,9 +3,9 @@ from collections import namedtuple from enum import Enum from typing import TYPE_CHECKING, Mapping, Sequence, Type, Union -from uuid import uuid4 from marshmallow import EXCLUDE, fields +from uuid_utils import uuid4 from .....messaging.decorators.attach_decorator import AttachDecorator from .....messaging.models.base import BaseModel, BaseModelSchema diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/routes.py b/aries_cloudagent/protocols/issue_credential/v2_0/routes.py index a966b63690..bf15b91df2 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/routes.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/routes.py @@ -14,6 +14,7 @@ ) from marshmallow import ValidationError, fields, validate, validates_schema +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....anoncreds.holder import AnonCredsHolderError from ....anoncreds.issuer import AnonCredsIssuerError @@ -543,6 +544,7 @@ def _format_result_with_details( ) @querystring_schema(V20CredExRecordListQueryStringSchema) @response_schema(V20CredExRecordListResultSchema(), 200, description="") +@tenant_authentication async def credential_exchange_list(request: web.BaseRequest): 
"""Request handler for searching credential exchange records. @@ -590,6 +592,7 @@ async def credential_exchange_list(request: web.BaseRequest): ) @match_info_schema(V20CredExIdMatchInfoSchema()) @response_schema(V20CredExRecordDetailSchema(), 200, description="") +@tenant_authentication async def credential_exchange_retrieve(request: web.BaseRequest): """Request handler for fetching single credential exchange record. @@ -637,6 +640,7 @@ async def credential_exchange_retrieve(request: web.BaseRequest): ) @request_schema(V20IssueCredSchemaCore()) @response_schema(V20CredExRecordSchema(), 200, description="") +@tenant_authentication async def credential_exchange_create(request: web.BaseRequest): """Request handler for creating a credential from attr values. @@ -713,6 +717,7 @@ async def credential_exchange_create(request: web.BaseRequest): ) @request_schema(V20CredExFreeSchema()) @response_schema(V20CredExRecordSchema(), 200, description="") +@tenant_authentication async def credential_exchange_send(request: web.BaseRequest): """Request handler for sending credential from issuer to holder from attr values. @@ -829,6 +834,7 @@ async def credential_exchange_send(request: web.BaseRequest): ) @request_schema(V20CredExFreeSchema()) @response_schema(V20CredExRecordSchema(), 200, description="") +@tenant_authentication async def credential_exchange_send_proposal(request: web.BaseRequest): """Request handler for sending credential proposal. @@ -955,6 +961,7 @@ async def _create_free_offer( ) @request_schema(V20CredOfferConnFreeRequestSchema()) @response_schema(V20CredExRecordSchema(), 200, description="") +@tenant_authentication async def credential_exchange_create_free_offer(request: web.BaseRequest): """Request handler for creating free credential offer. @@ -1027,6 +1034,7 @@ async def credential_exchange_create_free_offer(request: web.BaseRequest): ) @request_schema(V20CredOfferRequestSchema()) @response_schema(V20CredExRecordSchema(), 200, description="") +@tenant_authentication async def credential_exchange_send_free_offer(request: web.BaseRequest): """Request handler for sending free credential offer. @@ -1119,6 +1127,7 @@ async def credential_exchange_send_free_offer(request: web.BaseRequest): @match_info_schema(V20CredExIdMatchInfoSchema()) @request_schema(V20CredBoundOfferRequestSchema()) @response_schema(V20CredExRecordSchema(), 200, description="") +@tenant_authentication async def credential_exchange_send_bound_offer(request: web.BaseRequest): """Request handler for sending bound credential offer. @@ -1230,6 +1239,7 @@ async def credential_exchange_send_bound_offer(request: web.BaseRequest): ) @request_schema(V20CredRequestFreeSchema()) @response_schema(V20CredExRecordSchema(), 200, description="") +@tenant_authentication async def credential_exchange_send_free_request(request: web.BaseRequest): """Request handler for sending free credential request. @@ -1328,6 +1338,7 @@ async def credential_exchange_send_free_request(request: web.BaseRequest): @match_info_schema(V20CredExIdMatchInfoSchema()) @request_schema(V20CredRequestRequestSchema()) @response_schema(V20CredExRecordSchema(), 200, description="") +@tenant_authentication async def credential_exchange_send_bound_request(request: web.BaseRequest): """Request handler for sending credential request. 
@@ -1447,6 +1458,7 @@ async def credential_exchange_send_bound_request(request: web.BaseRequest): @match_info_schema(V20CredExIdMatchInfoSchema()) @request_schema(V20CredIssueRequestSchema()) @response_schema(V20CredExRecordDetailSchema(), 200, description="") +@tenant_authentication async def credential_exchange_issue(request: web.BaseRequest): """Request handler for sending credential. @@ -1541,6 +1553,7 @@ async def credential_exchange_issue(request: web.BaseRequest): @match_info_schema(V20CredExIdMatchInfoSchema()) @request_schema(V20CredStoreRequestSchema()) @response_schema(V20CredExRecordDetailSchema(), 200, description="") +@tenant_authentication async def credential_exchange_store(request: web.BaseRequest): """Request handler for storing credential. @@ -1644,6 +1657,7 @@ async def credential_exchange_store(request: web.BaseRequest): ) @match_info_schema(V20CredExIdMatchInfoSchema()) @response_schema(V20IssueCredentialModuleResponseSchema(), 200, description="") +@tenant_authentication async def credential_exchange_remove(request: web.BaseRequest): """Request handler for removing a credential exchange record. @@ -1672,6 +1686,7 @@ async def credential_exchange_remove(request: web.BaseRequest): @match_info_schema(V20CredExIdMatchInfoSchema()) @request_schema(V20CredIssueProblemReportRequestSchema()) @response_schema(V20IssueCredentialModuleResponseSchema(), 200, description="") +@tenant_authentication async def credential_exchange_problem_report(request: web.BaseRequest): """Request handler for sending problem report. diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_routes.py b/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_routes.py index 96b282eff7..e25e088635 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_routes.py @@ -1,14 +1,14 @@ -from .....vc.ld_proofs.error import LinkedDataProofException -from aries_cloudagent.tests import mock from unittest import IsolatedAsyncioTestCase -from .....admin.request_context import AdminRequestContext +from aries_cloudagent.tests import mock +from .....admin.request_context import AdminRequestContext +from .....core.in_memory import InMemoryProfile +from .....vc.ld_proofs.error import LinkedDataProofException from .. import routes as test_module from ..formats.indy.handler import IndyCredFormatHandler from ..formats.ld_proof.handler import LDProofCredFormatHandler from ..messages.cred_format import V20CredFormat - from . 
import ( LD_PROOF_VC_DETAIL, TEST_DID, @@ -18,7 +18,12 @@ class TestV20CredRoutes(IsolatedAsyncioTestCase): async def asyncSetUp(self): self.session_inject = {} - self.context = AdminRequestContext.test_context(self.session_inject) + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(self.session_inject, profile) self.request_dict = { "context": self.context, "outbound_message_router": mock.CoroutineMock(), @@ -28,6 +33,7 @@ async def asyncSetUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_validate_cred_filter_schema(self): diff --git a/aries_cloudagent/protocols/out_of_band/v1_0/manager.py b/aries_cloudagent/protocols/out_of_band/v1_0/manager.py index 37577c3999..e1dff21923 100644 --- a/aries_cloudagent/protocols/out_of_band/v1_0/manager.py +++ b/aries_cloudagent/protocols/out_of_band/v1_0/manager.py @@ -4,7 +4,8 @@ import logging import re from typing import List, Mapping, NamedTuple, Optional, Sequence, Text, Union -import uuid + +from uuid_utils import uuid4 from aries_cloudagent.protocols.coordinate_mediation.v1_0.route_manager import ( RouteManager, @@ -42,8 +43,8 @@ from .messages.problem_report import OOBProblemReport from .messages.reuse import HandshakeReuse from .messages.reuse_accept import HandshakeReuseAccept -from .messages.service import Service as ServiceMessage from .messages.service import Service +from .messages.service import Service as ServiceMessage from .models.invitation import InvitationRecord from .models.oob_record import OobRecord @@ -145,7 +146,7 @@ def __init__( self.route_manager = route_manager self.oob = oob - self.msg_id = str(uuid.uuid4()) + self.msg_id = str(uuid4()) self.attachments = attachments self.handshake_protocols = [ diff --git a/aries_cloudagent/protocols/out_of_band/v1_0/routes.py b/aries_cloudagent/protocols/out_of_band/v1_0/routes.py index 96aeea265a..7fcd42c1e4 100644 --- a/aries_cloudagent/protocols/out_of_band/v1_0/routes.py +++ b/aries_cloudagent/protocols/out_of_band/v1_0/routes.py @@ -6,14 +6,15 @@ from aiohttp import web from aiohttp_apispec import ( docs, + match_info_schema, querystring_schema, request_schema, - match_info_schema, response_schema, ) from marshmallow import fields, validate from marshmallow.exceptions import ValidationError +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....messaging.models.base import BaseModelError from ....messaging.models.openapi import OpenAPISchema @@ -225,6 +226,7 @@ class InvitationRecordMatchInfoSchema(OpenAPISchema): @querystring_schema(InvitationCreateQueryStringSchema()) @request_schema(InvitationCreateRequestSchema()) @response_schema(InvitationRecordSchema(), description="") +@tenant_authentication async def invitation_create(request: web.BaseRequest): """Request handler for creating a new connection invitation. @@ -293,6 +295,7 @@ async def invitation_create(request: web.BaseRequest): @querystring_schema(InvitationReceiveQueryStringSchema()) @request_schema(InvitationMessageSchema()) @response_schema(OobRecordSchema(), 200, description="") +@tenant_authentication async def invitation_receive(request: web.BaseRequest): """Request handler for receiving a new connection invitation. 
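Reviewer note: a sizeable share of this patch is pure import reshuffling with no behavior change: the out_of_band manager above, and `pres_exch_handler.py` plus its tests just below, are regrouped in what looks like an isort/Ruff-style order (an inference on my part; no tool config appears in this diff): alphabetized blocks of stdlib, then third-party, then local imports. The target shape, illustrated:

```python
# stdlib
import json
import logging

# third-party
from marshmallow import fields
from uuid_utils import uuid4

# local package (absolute form shown for a runnable example; the modules
# themselves use relative imports such as `from ....core.profile import ...`)
from aries_cloudagent.core.profile import Profile
```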
@@ -337,6 +340,7 @@ async def invitation_receive(request: web.BaseRequest): @docs(tags=["out-of-band"], summary="Delete records associated with invitation") @match_info_schema(InvitationRecordMatchInfoSchema()) @response_schema(InvitationRecordResponseSchema(), description="") +@tenant_authentication async def invitation_remove(request: web.BaseRequest): """Request handler for removing a invitation related conn and oob records. diff --git a/aries_cloudagent/protocols/out_of_band/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/out_of_band/v1_0/tests/test_routes.py index 7a9384f1cc..fa61be97a2 100644 --- a/aries_cloudagent/protocols/out_of_band/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/out_of_band/v1_0/tests/test_routes.py @@ -1,16 +1,20 @@ from unittest import IsolatedAsyncioTestCase + from aries_cloudagent.tests import mock from .....admin.request_context import AdminRequestContext from .....connections.models.conn_record import ConnRecord from .....core.in_memory import InMemoryProfile - from .. import routes as test_module class TestOutOfBandRoutes(IsolatedAsyncioTestCase): async def asyncSetUp(self): - self.profile = InMemoryProfile.test_profile() + self.profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) self.context = AdminRequestContext.test_context(profile=self.profile) self.request_dict = { "context": self.context, @@ -21,6 +25,7 @@ async def asyncSetUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_invitation_create(self): diff --git a/aries_cloudagent/protocols/present_proof/dif/pres_exch.py b/aries_cloudagent/protocols/present_proof/dif/pres_exch.py index 08e6064c56..72d7eef25e 100644 --- a/aries_cloudagent/protocols/present_proof/dif/pres_exch.py +++ b/aries_cloudagent/protocols/present_proof/dif/pres_exch.py @@ -220,7 +220,7 @@ class Meta: @pre_load def extract_info(self, data, **kwargs): - """deserialize.""" + """Deserialize.""" new_data = {} if isinstance(data, dict): if "uri_groups" in data: @@ -827,12 +827,10 @@ class Meta: id = fields.Str( required=False, - validate=UUID4_VALIDATE, metadata={"description": "ID", "example": UUID4_EXAMPLE}, ) definition_id = fields.Str( required=False, - validate=UUID4_VALIDATE, metadata={"description": "DefinitionID", "example": UUID4_EXAMPLE}, ) descriptor_maps = fields.List( diff --git a/aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py b/aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py index e5d0aa7509..8ccc271fd9 100644 --- a/aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py +++ b/aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py @@ -8,55 +8,52 @@ returns VerifiablePresentation """ -import pytz -import re import logging - +import re from datetime import datetime -from dateutil.parser import parse as dateutil_parser +from typing import Dict, List, Optional, Sequence, Tuple, Union + +import pytz from dateutil.parser import ParserError +from dateutil.parser import parse as dateutil_parser from jsonpath_ng import parse from pyld import jsonld from pyld.jsonld import JsonLdProcessor -from typing import Sequence, Optional, Tuple, Union, Dict, List from unflatten import unflatten -from uuid import uuid4 +from uuid_utils import uuid4 from ....core.error import BaseError from ....core.profile import Profile from ....storage.vc_holder.vc_record import VCRecord from ....vc.ld_proofs import ( - Ed25519Signature2018, - 
Ed25519Signature2020, BbsBlsSignature2020, BbsBlsSignatureProof2020, - WalletKeyPair, DocumentLoader, + Ed25519Signature2018, + Ed25519Signature2020, + WalletKeyPair, ) from ....vc.ld_proofs.constants import ( - SECURITY_CONTEXT_BBS_URL, EXPANDED_TYPE_CREDENTIALS_CONTEXT_V1_VC_TYPE, + SECURITY_CONTEXT_BBS_URL, ) -from ....vc.vc_ld.prove import sign_presentation, create_presentation, derive_credential +from ....vc.vc_ld.prove import create_presentation, derive_credential, sign_presentation from ....wallet.base import BaseWallet, DIDInfo -from ....wallet.default_verification_key_strategy import ( - BaseVerificationKeyStrategy, -) +from ....wallet.default_verification_key_strategy import BaseVerificationKeyStrategy from ....wallet.error import WalletError, WalletNotFoundError from ....wallet.key_type import BLS12381G2, ED25519 - from .pres_exch import ( - PresentationDefinition, - InputDescriptors, + Constraints, DIFField, Filter, - Constraints, - SubmissionRequirements, + InputDescriptorMapping, + InputDescriptors, + PresentationDefinition, + PresentationSubmission, Requirement, SchemaInputDescriptor, SchemasInputDescriptorFilter, - InputDescriptorMapping, - PresentationSubmission, + SubmissionRequirements, ) PRESENTATION_SUBMISSION_JSONLD_CONTEXT = ( diff --git a/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch_handler.py b/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch_handler.py index 05ead98dc6..75b9dac6e4 100644 --- a/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch_handler.py +++ b/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch_handler.py @@ -1,63 +1,54 @@ from copy import deepcopy from datetime import datetime from typing import Sequence -from uuid import uuid4 -from aries_cloudagent.tests import mock import pytest +from uuid_utils import uuid4 +from aries_cloudagent.tests import mock from aries_cloudagent.wallet.key_type import BLS12381G2, ED25519 from .....core.in_memory import InMemoryProfile from .....resolver.did_resolver import DIDResolver from .....storage.vc_holder.vc_record import VCRecord +from .....vc.ld_proofs import BbsBlsSignature2020 +from .....vc.ld_proofs.constants import SECURITY_CONTEXT_BBS_URL +from .....vc.ld_proofs.document_loader import DocumentLoader +from .....vc.ld_proofs.error import LinkedDataProofException +from .....vc.tests.data import BBS_SIGNED_VC_MATTR +from .....vc.tests.document_loader import custom_document_loader from .....wallet.base import BaseWallet, DIDInfo from .....wallet.default_verification_key_strategy import ( - DefaultVerificationKeyStrategy, BaseVerificationKeyStrategy, + DefaultVerificationKeyStrategy, ) -from .....wallet.did_method import SOV, KEY, DIDMethods +from .....wallet.did_method import KEY, SOV, DIDMethods from .....wallet.error import WalletNotFoundError -from .....vc.ld_proofs import ( - BbsBlsSignature2020, -) -from .....vc.ld_proofs.document_loader import DocumentLoader -from .....vc.ld_proofs.error import LinkedDataProofException -from .....vc.ld_proofs.constants import SECURITY_CONTEXT_BBS_URL -from .....vc.tests.document_loader import custom_document_loader -from .....vc.tests.data import ( - BBS_SIGNED_VC_MATTR, -) - from .. 
import pres_exch_handler as test_module from ..pres_exch import ( + Constraints, + DIFField, + Filter, PresentationDefinition, Requirement, - Filter, SchemaInputDescriptor, SchemasInputDescriptorFilter, - Constraints, - DIFField, -) -from ..pres_exch_handler import ( - DIFPresExchHandler, - DIFPresExchError, ) - +from ..pres_exch_handler import DIFPresExchError, DIFPresExchHandler from .test_data import ( - get_test_data, - edd_jsonld_creds, + EXPANDED_CRED_FHIR_TYPE_1, + EXPANDED_CRED_FHIR_TYPE_2, + TEST_CRED_DICT, + TEST_CRED_WILDCARD, bbs_bls_number_filter_creds, - bbs_signed_cred_no_credsubjectid, bbs_signed_cred_credsubjectid, + bbs_signed_cred_no_credsubjectid, creds_with_no_id, + edd_jsonld_creds, + get_test_data, is_holder_pd, is_holder_pd_multiple_fields_excluded, is_holder_pd_multiple_fields_included, - EXPANDED_CRED_FHIR_TYPE_1, - EXPANDED_CRED_FHIR_TYPE_2, - TEST_CRED_DICT, - TEST_CRED_WILDCARD, ) diff --git a/aries_cloudagent/protocols/present_proof/v1_0/routes.py b/aries_cloudagent/protocols/present_proof/v1_0/routes.py index a606e0ff89..3cf4ae38ee 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/routes.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/routes.py @@ -10,9 +10,9 @@ request_schema, response_schema, ) - from marshmallow import fields, validate +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....connections.models.conn_record import ConnRecord from ....indy.holder import IndyHolder, IndyHolderError @@ -289,6 +289,7 @@ class V10PresExIdMatchInfoSchema(OpenAPISchema): ) @querystring_schema(V10PresentationExchangeListQueryStringSchema) @response_schema(V10PresentationExchangeListSchema(), 200, description="") +@tenant_authentication async def presentation_exchange_list(request: web.BaseRequest): """Request handler for searching presentation exchange records. @@ -330,6 +331,7 @@ async def presentation_exchange_list(request: web.BaseRequest): ) @match_info_schema(V10PresExIdMatchInfoSchema()) @response_schema(V10PresentationExchangeSchema(), 200, description="") +@tenant_authentication async def presentation_exchange_retrieve(request: web.BaseRequest): """Request handler for fetching a single presentation exchange record. @@ -379,6 +381,7 @@ async def presentation_exchange_retrieve(request: web.BaseRequest): @match_info_schema(V10PresExIdMatchInfoSchema()) @querystring_schema(CredentialsFetchQueryStringSchema()) @response_schema(IndyCredPrecisSchema(many=True), 200, description="") +@tenant_authentication async def presentation_exchange_credentials_list(request: web.BaseRequest): """Request handler for searching applicable credential records. @@ -459,6 +462,7 @@ async def presentation_exchange_credentials_list(request: web.BaseRequest): ) @request_schema(V10PresentationProposalRequestSchema()) @response_schema(V10PresentationExchangeSchema(), 200, description="") +@tenant_authentication async def presentation_exchange_send_proposal(request: web.BaseRequest): """Request handler for sending a presentation proposal. @@ -543,6 +547,7 @@ async def presentation_exchange_send_proposal(request: web.BaseRequest): ) @request_schema(V10PresentationCreateRequestRequestSchema()) @response_schema(V10PresentationExchangeSchema(), 200, description="") +@tenant_authentication async def presentation_exchange_create_request(request: web.BaseRequest): """Request handler for creating a free presentation request. 
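Reviewer note: back in `dif/pres_exch.py`, `UUID4_VALIDATE` was removed from `PresentationSubmission`'s `id` and `definition_id` fields while the UUID4 example stays in the metadata, so non-UUID identifiers (which, as far as I can tell, the DIF Presentation Exchange data model permits) no longer fail deserialization. A marshmallow-level illustration with a hypothetical schema:

```python
from marshmallow import Schema, fields


class SubmissionSketch(Schema):
    # Example metadata documents the typical shape without enforcing it.
    id = fields.Str(required=False, metadata={"description": "ID"})
    definition_id = fields.Str(required=False)


# With validate=UUID4_VALIDATE this load would have raised a ValidationError:
print(SubmissionSketch().load({"id": "not-a-uuid", "definition_id": "pd-1"}))
```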
@@ -621,6 +626,7 @@ async def presentation_exchange_create_request(request: web.BaseRequest): ) @request_schema(V10PresentationSendRequestRequestSchema()) @response_schema(V10PresentationExchangeSchema(), 200, description="") +@tenant_authentication async def presentation_exchange_send_free_request(request: web.BaseRequest): """Request handler for sending a presentation request free from any proposal. @@ -710,6 +716,7 @@ async def presentation_exchange_send_free_request(request: web.BaseRequest): @match_info_schema(V10PresExIdMatchInfoSchema()) @request_schema(V10PresentationSendRequestToProposalSchema()) @response_schema(V10PresentationExchangeSchema(), 200, description="") +@tenant_authentication async def presentation_exchange_send_bound_request(request: web.BaseRequest): """Request handler for sending a presentation request bound to a proposal. @@ -806,6 +813,7 @@ async def presentation_exchange_send_bound_request(request: web.BaseRequest): @match_info_schema(V10PresExIdMatchInfoSchema()) @request_schema(V10PresentationSendRequestSchema()) @response_schema(V10PresentationExchangeSchema(), description="") +@tenant_authentication async def presentation_exchange_send_presentation(request: web.BaseRequest): """Request handler for sending a presentation. @@ -923,6 +931,7 @@ async def presentation_exchange_send_presentation(request: web.BaseRequest): ) @match_info_schema(V10PresExIdMatchInfoSchema()) @response_schema(V10PresentationExchangeSchema(), description="") +@tenant_authentication async def presentation_exchange_verify_presentation(request: web.BaseRequest): """Request handler for verifying a presentation request. @@ -998,6 +1007,7 @@ async def presentation_exchange_verify_presentation(request: web.BaseRequest): @match_info_schema(V10PresExIdMatchInfoSchema()) @request_schema(V10PresentationProblemReportRequestSchema()) @response_schema(V10PresentProofModuleResponseSchema(), 200, description="") +@tenant_authentication async def presentation_exchange_problem_report(request: web.BaseRequest): """Request handler for sending problem report. @@ -1039,6 +1049,7 @@ async def presentation_exchange_problem_report(request: web.BaseRequest): ) @match_info_schema(V10PresExIdMatchInfoSchema()) @response_schema(V10PresentProofModuleResponseSchema(), description="") +@tenant_authentication async def presentation_exchange_remove(request: web.BaseRequest): """Request handler for removing a presentation exchange record. diff --git a/aries_cloudagent/protocols/present_proof/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/present_proof/v1_0/tests/test_routes.py index ca3d8e6927..9b5889b973 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/tests/test_routes.py @@ -1,23 +1,28 @@ import importlib - -from aries_cloudagent.tests import mock from unittest import IsolatedAsyncioTestCase from marshmallow import ValidationError +from aries_cloudagent.tests import mock + from .....admin.request_context import AdminRequestContext +from .....core.in_memory import InMemoryProfile from .....indy.holder import IndyHolder from .....indy.models.proof_request import IndyProofReqAttrSpecSchema from .....indy.verifier import IndyVerifier from .....ledger.base import BaseLedger from .....storage.error import StorageNotFoundError - from .. 
import routes as test_module class TestProofRoutes(IsolatedAsyncioTestCase): def setUp(self): - self.context = AdminRequestContext.test_context() + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(profile=profile) self.profile = self.context.profile self.request_dict = { "context": self.context, @@ -28,6 +33,7 @@ def setUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_validate_proof_req_attr_spec(self): diff --git a/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py index 86d4c4451e..9e8fb2d6bf 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py @@ -3,9 +3,8 @@ import json import logging from typing import Mapping, Optional, Sequence, Tuple -from uuid import uuid4 -from marshmallow import RAISE +from uuid_utils import uuid4 from aries_cloudagent.anoncreds.holder import AnonCredsHolder @@ -77,7 +76,7 @@ def validate_fields(cls, message_type: str, attachment_data: Mapping): Schema = mapping[message_type] # Validate, throw if not valid - Schema(unknown=RAISE).load(attachment_data) + Schema().load(attachment_data) def get_format_identifier(self, message_type: str) -> str: """Get attachment format identifier for format and message combination. diff --git a/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_format.py b/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_format.py index 7de8d98794..2c25a92d7f 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_format.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_format.py @@ -3,9 +3,9 @@ from collections import namedtuple from enum import Enum from typing import TYPE_CHECKING, Mapping, Sequence, Type, Union -from uuid import uuid4 from marshmallow import EXCLUDE, fields +from uuid_utils import uuid4 from .....messaging.decorators.attach_decorator import AttachDecorator from .....messaging.models.base import BaseModel, BaseModelSchema diff --git a/aries_cloudagent/protocols/present_proof/v2_0/routes.py b/aries_cloudagent/protocols/present_proof/v2_0/routes.py index 4251162649..b086f2d1a2 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/routes.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/routes.py @@ -11,12 +11,12 @@ request_schema, response_schema, ) - from marshmallow import ValidationError, fields, validate, validates_schema +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext -from ....connections.models.conn_record import ConnRecord from ....anoncreds.holder import AnonCredsHolder, AnonCredsHolderError +from ....connections.models.conn_record import ConnRecord from ....indy.holder import IndyHolder, IndyHolderError from ....indy.models.cred_precis import IndyCredPrecisSchema from ....indy.models.proof import IndyPresSpecSchema @@ -425,6 +425,7 @@ def _formats_attach(by_format: Mapping, msg_type: str, spec: str) -> Mapping: @docs(tags=["present-proof v2.0"], summary="Fetch all present-proof exchange records") @querystring_schema(V20PresExRecordListQueryStringSchema) @response_schema(V20PresExRecordListSchema(), 200, description="") +@tenant_authentication async def present_proof_list(request: web.BaseRequest): """Request 
handler for searching presentation exchange records. @@ -467,6 +468,7 @@ async def present_proof_list(request: web.BaseRequest): ) @match_info_schema(V20PresExIdMatchInfoSchema()) @response_schema(V20PresExRecordSchema(), 200, description="") +@tenant_authentication async def present_proof_retrieve(request: web.BaseRequest): """Request handler for fetching a single presentation exchange record. @@ -513,6 +515,7 @@ async def present_proof_retrieve(request: web.BaseRequest): @match_info_schema(V20PresExIdMatchInfoSchema()) @querystring_schema(V20CredentialsFetchQueryStringSchema()) @response_schema(IndyCredPrecisSchema(many=True), 200, description="") +@tenant_authentication async def present_proof_credentials_list(request: web.BaseRequest): """Request handler for searching applicable credential records. @@ -809,6 +812,7 @@ async def retrieve_uri_list_from_schema_filter( @docs(tags=["present-proof v2.0"], summary="Sends a presentation proposal") @request_schema(V20PresProposalRequestSchema()) @response_schema(V20PresExRecordSchema(), 200, description="") +@tenant_authentication async def present_proof_send_proposal(request: web.BaseRequest): """Request handler for sending a presentation proposal. @@ -891,6 +895,7 @@ async def present_proof_send_proposal(request: web.BaseRequest): ) @request_schema(V20PresCreateRequestRequestSchema()) @response_schema(V20PresExRecordSchema(), 200, description="") +@tenant_authentication async def present_proof_create_request(request: web.BaseRequest): """Request handler for creating a free presentation request. @@ -967,6 +972,7 @@ async def present_proof_create_request(request: web.BaseRequest): ) @request_schema(V20PresSendRequestRequestSchema()) @response_schema(V20PresExRecordSchema(), 200, description="") +@tenant_authentication async def present_proof_send_free_request(request: web.BaseRequest): """Request handler for sending a presentation request free from any proposal. @@ -1050,6 +1056,7 @@ async def present_proof_send_free_request(request: web.BaseRequest): @match_info_schema(V20PresExIdMatchInfoSchema()) @request_schema(V20PresentationSendRequestToProposalSchema()) @response_schema(V20PresExRecordSchema(), 200, description="") +@tenant_authentication async def present_proof_send_bound_request(request: web.BaseRequest): """Request handler for sending a presentation request bound to a proposal. @@ -1140,6 +1147,7 @@ async def present_proof_send_bound_request(request: web.BaseRequest): @match_info_schema(V20PresExIdMatchInfoSchema()) @request_schema(V20PresSpecByFormatRequestSchema()) @response_schema(V20PresExRecordSchema(), description="") +@tenant_authentication async def present_proof_send_presentation(request: web.BaseRequest): """Request handler for sending a presentation. @@ -1253,6 +1261,7 @@ async def present_proof_send_presentation(request: web.BaseRequest): @docs(tags=["present-proof v2.0"], summary="Verify a received presentation") @match_info_schema(V20PresExIdMatchInfoSchema()) @response_schema(V20PresExRecordSchema(), description="") +@tenant_authentication async def present_proof_verify_presentation(request: web.BaseRequest): """Request handler for verifying a presentation request. 
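Reviewer note: in `formats/dif/handler.py` above, `Schema(unknown=RAISE).load(attachment_data)` became `Schema().load(attachment_data)` with the `RAISE` import dropped. marshmallow 3's default `unknown` policy is already `RAISE`, so this should be behavior-preserving unless a schema's `Meta.unknown` overrides it, in which case that schema's own setting now wins. The default, demonstrated:

```python
from marshmallow import Schema, ValidationError, fields


class AttachmentSketch(Schema):
    challenge = fields.Str()


try:
    AttachmentSketch().load({"challenge": "abc", "bogus": 1})
except ValidationError as err:
    # Unknown keys fail by default; the explicit unknown=RAISE was redundant.
    print(err.messages)  # {'bogus': ['Unknown field.']}
```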
@@ -1321,6 +1330,7 @@ async def present_proof_verify_presentation(request: web.BaseRequest): @match_info_schema(V20PresExIdMatchInfoSchema()) @request_schema(V20PresProblemReportRequestSchema()) @response_schema(V20PresentProofModuleResponseSchema(), 200, description="") +@tenant_authentication async def present_proof_problem_report(request: web.BaseRequest): """Request handler for sending problem report. @@ -1359,6 +1369,7 @@ async def present_proof_problem_report(request: web.BaseRequest): ) @match_info_schema(V20PresExIdMatchInfoSchema()) @response_schema(V20PresentProofModuleResponseSchema(), description="") +@tenant_authentication async def present_proof_remove(request: web.BaseRequest): """Request handler for removing a presentation exchange record. diff --git a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes.py b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes.py index 90ccebce43..328b2bf878 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes.py @@ -1,11 +1,14 @@ from copy import deepcopy -from unittest import IsolatedAsyncioTestCase -from aries_cloudagent.tests import mock -from marshmallow import ValidationError from time import time +from unittest import IsolatedAsyncioTestCase from unittest.mock import ANY +from marshmallow import ValidationError + +from aries_cloudagent.tests import mock + from .....admin.request_context import AdminRequestContext +from .....core.in_memory import InMemoryProfile from .....indy.holder import IndyHolder from .....indy.models.proof_request import IndyProofReqAttrSpecSchema from .....indy.verifier import IndyVerifier @@ -13,9 +16,7 @@ from .....storage.error import StorageNotFoundError from .....storage.vc_holder.base import VCHolder from .....storage.vc_holder.vc_record import VCRecord - from ...dif.pres_exch import SchemaInputDescriptor - from .. 
import routes as test_module from ..messages.pres_format import V20PresFormat from ..models.pres_exchange import V20PresExRecord @@ -126,7 +127,12 @@ class TestPresentProofRoutes(IsolatedAsyncioTestCase): def setUp(self): - self.context = AdminRequestContext.test_context() + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(profile=profile) self.profile = self.context.profile injector = self.profile.context.injector @@ -181,6 +187,7 @@ def setUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_validate(self): diff --git a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes_anoncreds.py b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes_anoncreds.py index 4740a46f3e..e79f0e0c74 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes_anoncreds.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes_anoncreds.py @@ -1,22 +1,23 @@ -import pytest from copy import deepcopy -from unittest import IsolatedAsyncioTestCase -from aries_cloudagent.tests import mock -from marshmallow import ValidationError from time import time +from unittest import IsolatedAsyncioTestCase from unittest.mock import ANY +import pytest +from marshmallow import ValidationError + +from aries_cloudagent.tests import mock + from .....admin.request_context import AdminRequestContext from .....anoncreds.holder import AnonCredsHolder -from .....indy.models.proof_request import IndyProofReqAttrSpecSchema from .....anoncreds.verifier import AnonCredsVerifier +from .....core.in_memory import InMemoryProfile +from .....indy.models.proof_request import IndyProofReqAttrSpecSchema from .....ledger.base import BaseLedger from .....storage.error import StorageNotFoundError from .....storage.vc_holder.base import VCHolder from .....storage.vc_holder.vc_record import VCRecord - from ...dif.pres_exch import SchemaInputDescriptor - from .. 
import routes as test_module from ..messages.pres_format import V20PresFormat from ..models.pres_exchange import V20PresExRecord @@ -127,7 +128,12 @@ class TestPresentProofRoutesAnonCreds(IsolatedAsyncioTestCase): def setUp(self): - self.context = AdminRequestContext.test_context() + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(profile=profile) self.context.profile.settings.set_value("wallet.type", "askar-anoncreds") self.profile = self.context.profile injector = self.profile.context.injector @@ -183,6 +189,7 @@ def setUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_validate(self): diff --git a/aries_cloudagent/protocols/trustping/v1_0/routes.py b/aries_cloudagent/protocols/trustping/v1_0/routes.py index f8a41fd412..b1e850515f 100644 --- a/aries_cloudagent/protocols/trustping/v1_0/routes.py +++ b/aries_cloudagent/protocols/trustping/v1_0/routes.py @@ -2,9 +2,9 @@ from aiohttp import web from aiohttp_apispec import docs, match_info_schema, request_schema, response_schema - from marshmallow import fields +from ....admin.decorators.auth import tenant_authentication from ....admin.request_context import AdminRequestContext from ....connections.models.conn_record import ConnRecord from ....messaging.models.openapi import OpenAPISchema @@ -45,6 +45,7 @@ class PingConnIdMatchInfoSchema(OpenAPISchema): @match_info_schema(PingConnIdMatchInfoSchema()) @request_schema(PingRequestSchema()) @response_schema(PingRequestResponseSchema(), 200, description="") +@tenant_authentication async def connections_send_ping(request: web.BaseRequest): """Request handler for sending a trust ping to a connection. diff --git a/aries_cloudagent/protocols/trustping/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/trustping/v1_0/tests/test_routes.py index 97cd67993a..541c4e7abe 100644 --- a/aries_cloudagent/protocols/trustping/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/trustping/v1_0/tests/test_routes.py @@ -1,15 +1,21 @@ from unittest import IsolatedAsyncioTestCase + from aries_cloudagent.tests import mock from .....admin.request_context import AdminRequestContext - +from .....core.in_memory import InMemoryProfile from .. 
import routes as test_module class TestTrustpingRoutes(IsolatedAsyncioTestCase): def setUp(self): self.session_inject = {} - self.context = AdminRequestContext.test_context(self.session_inject) + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) + self.context = AdminRequestContext.test_context(self.session_inject, profile) self.request_dict = { "context": self.context, "outbound_message_router": mock.CoroutineMock(), @@ -19,6 +25,7 @@ def setUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) async def test_connections_send_ping(self): diff --git a/aries_cloudagent/resolver/routes.py b/aries_cloudagent/resolver/routes.py index 85fdc2522c..a027577556 100644 --- a/aries_cloudagent/resolver/routes.py +++ b/aries_cloudagent/resolver/routes.py @@ -2,10 +2,10 @@ from aiohttp import web from aiohttp_apispec import docs, match_info_schema, response_schema -from pydid.common import DID_PATTERN - from marshmallow import fields, validate +from pydid.common import DID_PATTERN +from ..admin.decorators.auth import tenant_authentication from ..admin.request_context import AdminRequestContext from ..messaging.models.openapi import OpenAPISchema from .base import DIDMethodNotSupported, DIDNotFound, ResolutionResult, ResolverError @@ -49,6 +49,7 @@ class DIDMatchInfoSchema(OpenAPISchema): @docs(tags=["resolver"], summary="Retrieve doc for requested did") @match_info_schema(DIDMatchInfoSchema()) @response_schema(ResolutionResultSchema(), 200) +@tenant_authentication async def resolve_did(request: web.Request): """Retrieve a did document.""" context: AdminRequestContext = request["context"] diff --git a/aries_cloudagent/resolver/tests/test_routes.py b/aries_cloudagent/resolver/tests/test_routes.py index bdb1c2fd73..311f60fbb2 100644 --- a/aries_cloudagent/resolver/tests/test_routes.py +++ b/aries_cloudagent/resolver/tests/test_routes.py @@ -3,11 +3,11 @@ # pylint: disable=redefined-outer-name import pytest -from aries_cloudagent.tests import mock from pydid import DIDDocument -from ...core.in_memory import InMemoryProfile +from aries_cloudagent.tests import mock +from ...core.in_memory import InMemoryProfile from .. import routes as test_module from ..base import ( DIDMethodNotSupported, @@ -18,7 +18,6 @@ ResolverType, ) from ..did_resolver import DIDResolver - from . 
import DOC @@ -59,7 +58,11 @@ def mock_resolver(resolution_result): @pytest.mark.asyncio async def test_resolver(mock_resolver, mock_response: mock.MagicMock, did_doc): - profile = InMemoryProfile.test_profile() + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) context = profile.context setattr(context, "profile", profile) session = await profile.session() @@ -77,6 +80,7 @@ async def test_resolver(mock_resolver, mock_response: mock.MagicMock, did_doc): query={}, json=mock.CoroutineMock(return_value={}), __getitem__=lambda _, k: request_dict[k], + headers={"x-api-key": "secret-key"}, ) with mock.patch.object( context.profile, @@ -100,7 +104,11 @@ async def test_resolver(mock_resolver, mock_response: mock.MagicMock, did_doc): async def test_resolver_not_found_error(mock_resolver, side_effect, error): mock_resolver.resolve_with_metadata = mock.CoroutineMock(side_effect=side_effect()) - profile = InMemoryProfile.test_profile() + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) context = profile.context setattr(context, "profile", profile) session = await profile.session() @@ -118,6 +126,7 @@ async def test_resolver_not_found_error(mock_resolver, side_effect, error): query={}, json=mock.CoroutineMock(return_value={}), __getitem__=lambda _, k: request_dict[k], + headers={"x-api-key": "secret-key"}, ) with mock.patch.object( context.profile, diff --git a/aries_cloudagent/revocation/indy.py b/aries_cloudagent/revocation/indy.py index 0f0c4114eb..bd0b8ced36 100644 --- a/aries_cloudagent/revocation/indy.py +++ b/aries_cloudagent/revocation/indy.py @@ -2,7 +2,8 @@ import logging from typing import Optional, Sequence, Tuple -from uuid import uuid4 + +from uuid_utils import uuid4 from ..core.profile import Profile from ..ledger.base import BaseLedger @@ -17,7 +18,6 @@ is_author_role, ) from ..storage.base import StorageNotFoundError - from .error import ( RevocationError, RevocationInvalidStateValueError, diff --git a/aries_cloudagent/revocation/manager.py b/aries_cloudagent/revocation/manager.py index eb67ab47eb..1d32cc3af6 100644 --- a/aries_cloudagent/revocation/manager.py +++ b/aries_cloudagent/revocation/manager.py @@ -338,10 +338,11 @@ async def clear_pending_revocations( async with self._profile.transaction() as txn: issuer_rr_recs = await IssuerRevRegRecord.query_by_pending(txn) for issuer_rr_rec in issuer_rr_recs: + if purge and issuer_rr_rec.revoc_reg_id not in purge: + continue rrid = issuer_rr_rec.revoc_reg_id await issuer_rr_rec.clear_pending(txn, (purge or {}).get(rrid)) - if issuer_rr_rec.pending_pub: - result[rrid] = issuer_rr_rec.pending_pub + result[rrid] = issuer_rr_rec.pending_pub notify.append(rrid) await txn.commit() diff --git a/aries_cloudagent/revocation/models/issuer_rev_reg_record.py b/aries_cloudagent/revocation/models/issuer_rev_reg_record.py index cf35ed9c96..34e2d53415 100644 --- a/aries_cloudagent/revocation/models/issuer_rev_reg_record.py +++ b/aries_cloudagent/revocation/models/issuer_rev_reg_record.py @@ -1,9 +1,8 @@ """Issuer revocation registry storage handling.""" -import json import importlib +import json import logging -import uuid from functools import total_ordering from os.path import join from pathlib import Path @@ -12,12 +11,10 @@ from urllib.parse import urlparse from marshmallow import fields, validate +from uuid_utils import uuid4 from ...core.profile import Profile, ProfileSession -from ...indy.credx.issuer import ( - CATEGORY_CRED_DEF, - 
CATEGORY_REV_REG_DEF_PRIVATE, -) +from ...indy.credx.issuer import CATEGORY_CRED_DEF, CATEGORY_REV_REG_DEF_PRIVATE from ...indy.issuer import IndyIssuer, IndyIssuerError from ...indy.models.revocation import ( IndyRevRegDef, @@ -190,7 +187,7 @@ def _check_url(self, url) -> None: async def generate_registry(self, profile: Profile): """Create the revocation registry definition and tails file.""" if not self.tag: - self.tag = self._id or str(uuid.uuid4()) + self.tag = self._id or str(uuid4()) if self.state != IssuerRevRegRecord.STATE_INIT: raise RevocationError( diff --git a/aries_cloudagent/revocation/routes.py b/aries_cloudagent/revocation/routes.py index 3ff56d6cbf..c2e0c13782 100644 --- a/aries_cloudagent/revocation/routes.py +++ b/aries_cloudagent/revocation/routes.py @@ -18,6 +18,7 @@ from marshmallow import fields, validate, validates_schema from marshmallow.exceptions import ValidationError +from ..admin.decorators.auth import tenant_authentication from ..admin.request_context import AdminRequestContext from ..connections.models.conn_record import ConnRecord from ..core.event_bus import Event, EventBus @@ -507,6 +508,7 @@ class RevRegConnIdMatchInfoSchema(OpenAPISchema): @querystring_schema(CreateRevRegTxnForEndorserOptionSchema()) @querystring_schema(RevRegConnIdMatchInfoSchema()) @response_schema(RevocationModuleResponseSchema(), description="") +@tenant_authentication async def revoke(request: web.BaseRequest): """Request handler for storing a credential revocation. @@ -617,6 +619,7 @@ async def revoke(request: web.BaseRequest): @querystring_schema(CreateRevRegTxnForEndorserOptionSchema()) @querystring_schema(RevRegConnIdMatchInfoSchema()) @response_schema(TxnOrPublishRevocationsResultSchema(), 200, description="") +@tenant_authentication async def publish_revocations(request: web.BaseRequest): """Request handler for publishing pending revocations to the ledger. @@ -687,6 +690,7 @@ async def publish_revocations(request: web.BaseRequest): @docs(tags=["revocation"], summary="Clear pending revocations") @request_schema(ClearPendingRevocationsRequestSchema()) @response_schema(PublishRevocationsSchema(), 200, description="") +@tenant_authentication async def clear_pending_revocations(request: web.BaseRequest): """Request handler for clearing pending revocations. @@ -717,6 +721,7 @@ async def clear_pending_revocations(request: web.BaseRequest): @docs(tags=["revocation"], summary="Rotate revocation registry") @match_info_schema(RevocationCredDefIdMatchInfoSchema()) @response_schema(RevRegsCreatedSchema(), 200, description="") +@tenant_authentication async def rotate_rev_reg(request: web.BaseRequest): """Request handler to rotate the active revocation registries for cred. def. @@ -749,6 +754,7 @@ async def rotate_rev_reg(request: web.BaseRequest): @docs(tags=["revocation"], summary="Creates a new revocation registry") @request_schema(RevRegCreateRequestSchema()) @response_schema(RevRegResultSchema(), 200, description="") +@tenant_authentication async def create_rev_reg(request: web.BaseRequest): """Request handler to create a new revocation registry. @@ -802,6 +808,7 @@ async def create_rev_reg(request: web.BaseRequest): ) @querystring_schema(RevRegsCreatedQueryStringSchema()) @response_schema(RevRegsCreatedSchema(), 200, description="") +@tenant_authentication async def rev_regs_created(request: web.BaseRequest): """Request handler to get revocation registries that current agent created. 
@@ -842,6 +849,7 @@ async def rev_regs_created(request: web.BaseRequest): ) @match_info_schema(RevRegIdMatchInfoSchema()) @response_schema(RevRegResultSchema(), 200, description="") +@tenant_authentication async def get_rev_reg(request: web.BaseRequest): """Request handler to get a revocation registry by rev reg id. @@ -874,6 +882,7 @@ async def get_rev_reg(request: web.BaseRequest): ) @match_info_schema(RevRegIdMatchInfoSchema()) @response_schema(RevRegIssuedResultSchema(), 200, description="") +@tenant_authentication async def get_rev_reg_issued_count(request: web.BaseRequest): """Request handler to get number of credentials issued against revocation registry. @@ -909,6 +918,7 @@ async def get_rev_reg_issued_count(request: web.BaseRequest): ) @match_info_schema(RevRegIdMatchInfoSchema()) @response_schema(CredRevRecordDetailsResultSchema(), 200, description="") +@tenant_authentication async def get_rev_reg_issued(request: web.BaseRequest): """Request handler to get credentials issued against revocation registry. @@ -946,6 +956,7 @@ async def get_rev_reg_issued(request: web.BaseRequest): ) @match_info_schema(RevRegIdMatchInfoSchema()) @response_schema(CredRevIndyRecordsResultSchema(), 200, description="") +@tenant_authentication async def get_rev_reg_indy_recs(request: web.BaseRequest): """Request handler to get details of revoked credentials from ledger. @@ -980,6 +991,7 @@ async def get_rev_reg_indy_recs(request: web.BaseRequest): @match_info_schema(RevRegIdMatchInfoSchema()) @querystring_schema(RevRegUpdateRequestMatchInfoSchema()) @response_schema(RevRegWalletUpdatedResultSchema(), 200, description="") +@tenant_authentication async def update_rev_reg_revoked_state(request: web.BaseRequest): """Request handler to fix ledger entry of credentials revoked against registry. @@ -1071,6 +1083,7 @@ async def update_rev_reg_revoked_state(request: web.BaseRequest): ) @querystring_schema(CredRevRecordQueryStringSchema()) @response_schema(CredRevRecordResultSchema(), 200, description="") +@tenant_authentication async def get_cred_rev_record(request: web.BaseRequest): """Request handler to get credential revocation record. @@ -1112,6 +1125,7 @@ async def get_cred_rev_record(request: web.BaseRequest): ) @match_info_schema(RevocationCredDefIdMatchInfoSchema()) @response_schema(RevRegResultSchema(), 200, description="") +@tenant_authentication async def get_active_rev_reg(request: web.BaseRequest): """Request handler to get current active revocation registry by cred def id. @@ -1145,6 +1159,7 @@ async def get_active_rev_reg(request: web.BaseRequest): ) @match_info_schema(RevRegIdMatchInfoSchema()) @response_schema(RevocationModuleResponseSchema, description="tails file") +@tenant_authentication async def get_tails_file(request: web.BaseRequest) -> web.FileResponse: """Request handler to download tails file for revocation registry. @@ -1177,6 +1192,7 @@ async def get_tails_file(request: web.BaseRequest) -> web.FileResponse: ) @match_info_schema(RevRegIdMatchInfoSchema()) @response_schema(RevocationModuleResponseSchema(), description="") +@tenant_authentication async def upload_tails_file(request: web.BaseRequest): """Request handler to upload local tails file for revocation registry. 
@@ -1215,6 +1231,7 @@ async def upload_tails_file(request: web.BaseRequest): @querystring_schema(CreateRevRegTxnForEndorserOptionSchema()) @querystring_schema(RevRegConnIdMatchInfoSchema()) @response_schema(TxnOrRevRegResultSchema(), 200, description="") +@tenant_authentication async def send_rev_reg_def(request: web.BaseRequest): """Request handler to send revocation registry definition by rev reg id to ledger. @@ -1335,6 +1352,7 @@ async def send_rev_reg_def(request: web.BaseRequest): @querystring_schema(CreateRevRegTxnForEndorserOptionSchema()) @querystring_schema(RevRegConnIdMatchInfoSchema()) @response_schema(RevRegResultSchema(), 200, description="") +@tenant_authentication async def send_rev_reg_entry(request: web.BaseRequest): """Request handler to send rev reg entry by registry id to ledger. @@ -1454,6 +1472,7 @@ async def send_rev_reg_entry(request: web.BaseRequest): @match_info_schema(RevRegIdMatchInfoSchema()) @request_schema(RevRegUpdateTailsFileUriSchema()) @response_schema(RevRegResultSchema(), 200, description="") +@tenant_authentication async def update_rev_reg(request: web.BaseRequest): """Request handler to update a rev reg's public tails URI by registry id. @@ -1491,6 +1510,7 @@ async def update_rev_reg(request: web.BaseRequest): @match_info_schema(RevRegIdMatchInfoSchema()) @querystring_schema(SetRevRegStateQueryStringSchema()) @response_schema(RevRegResultSchema(), 200, description="") +@tenant_authentication async def set_rev_reg_state(request: web.BaseRequest): """Request handler to set a revocation registry state manually. @@ -1744,6 +1764,7 @@ class TailsDeleteResponseSchema(OpenAPISchema): @querystring_schema(RevRegId()) @response_schema(TailsDeleteResponseSchema()) @docs(tags=["revocation"], summary="Delete the tail files") +@tenant_authentication async def delete_tails(request: web.BaseRequest) -> json: """Delete Tails Files.""" context: AdminRequestContext = request["context"] diff --git a/aries_cloudagent/revocation/tests/test_manager.py b/aries_cloudagent/revocation/tests/test_manager.py index 6ebc48a330..d811b31a27 100644 --- a/aries_cloudagent/revocation/tests/test_manager.py +++ b/aries_cloudagent/revocation/tests/test_manager.py @@ -619,18 +619,17 @@ async def test_publish_pending_revocations_1_rev_reg_some(self): mock_issuer_rev_reg_records[1].clear_pending.assert_not_called() async def test_clear_pending(self): + REV_REG_ID_2 = f"{TEST_DID}:4:{CRED_DEF_ID}:CL_ACCUM:tag2" mock_issuer_rev_reg_records = [ - mock.MagicMock( + test_module.IssuerRevRegRecord( revoc_reg_id=REV_REG_ID, tails_local_path=TAILS_LOCAL, - pending_pub=[], - clear_pending=mock.CoroutineMock(), + pending_pub=["1", "2"], ), - mock.MagicMock( - revoc_reg_id=f"{TEST_DID}:4:{CRED_DEF_ID}:CL_ACCUM:tag2", + test_module.IssuerRevRegRecord( + revoc_reg_id=REV_REG_ID_2, tails_local_path=TAILS_LOCAL, - pending_pub=[], - clear_pending=mock.CoroutineMock(), + pending_pub=["9", "99"], ), ] with mock.patch.object( @@ -639,21 +638,22 @@ async def test_clear_pending(self): mock.CoroutineMock(return_value=mock_issuer_rev_reg_records), ) as record: result = await self.manager.clear_pending_revocations() - assert result == {} + assert result[REV_REG_ID] == [] + assert result[REV_REG_ID_2] == [] async def test_clear_pending_1_rev_reg_all(self): + REV_REG_ID_2 = f"{TEST_DID}:4:{CRED_DEF_ID}:CL_ACCUM:tag2" + mock_issuer_rev_reg_records = [ - mock.MagicMock( + test_module.IssuerRevRegRecord( revoc_reg_id=REV_REG_ID, tails_local_path=TAILS_LOCAL, pending_pub=["1", "2"], - clear_pending=mock.CoroutineMock(), 
), - mock.MagicMock( - revoc_reg_id=f"{TEST_DID}:4:{CRED_DEF_ID}:CL_ACCUM:tag2", + test_module.IssuerRevRegRecord( + revoc_reg_id=REV_REG_ID_2, tails_local_path=TAILS_LOCAL, pending_pub=["9", "99"], - clear_pending=mock.CoroutineMock(), ), ] with mock.patch.object( @@ -661,25 +661,22 @@ async def test_clear_pending_1_rev_reg_all(self): "query_by_pending", mock.CoroutineMock(return_value=mock_issuer_rev_reg_records), ) as record: - result = await self.manager.clear_pending_revocations({REV_REG_ID: None}) - assert result == { - REV_REG_ID: ["1", "2"], - f"{TEST_DID}:4:{CRED_DEF_ID}:CL_ACCUM:tag2": ["9", "99"], - } + result = await self.manager.clear_pending_revocations({REV_REG_ID: []}) + assert result[REV_REG_ID] == [] + assert result.get(REV_REG_ID_2) is None async def test_clear_pending_1_rev_reg_some(self): + REV_REG_ID_2 = f"{TEST_DID}:4:{CRED_DEF_ID}:CL_ACCUM:tag2" mock_issuer_rev_reg_records = [ - mock.MagicMock( + test_module.IssuerRevRegRecord( revoc_reg_id=REV_REG_ID, tails_local_path=TAILS_LOCAL, pending_pub=["1", "2"], - clear_pending=mock.CoroutineMock(), ), - mock.MagicMock( - revoc_reg_id=f"{TEST_DID}:4:{CRED_DEF_ID}:CL_ACCUM:tag2", + test_module.IssuerRevRegRecord( + revoc_reg_id=REV_REG_ID_2, tails_local_path=TAILS_LOCAL, pending_pub=["99"], - clear_pending=mock.CoroutineMock(), ), ] with mock.patch.object( @@ -688,10 +685,34 @@ async def test_clear_pending_1_rev_reg_some(self): mock.CoroutineMock(return_value=mock_issuer_rev_reg_records), ) as record: result = await self.manager.clear_pending_revocations({REV_REG_ID: ["9"]}) - assert result == { - REV_REG_ID: ["1", "2"], - f"{TEST_DID}:4:{CRED_DEF_ID}:CL_ACCUM:tag2": ["99"], - } + + assert result[REV_REG_ID] == ["1", "2"] + assert result.get(REV_REG_ID_2) is None + + async def test_clear_pending_both(self): + REV_REG_ID_2 = f"{TEST_DID}:4:{CRED_DEF_ID}:CL_ACCUM:tag2" + mock_issuer_rev_reg_records = [ + test_module.IssuerRevRegRecord( + revoc_reg_id=REV_REG_ID, + tails_local_path=TAILS_LOCAL, + pending_pub=["1", "2"], + ), + test_module.IssuerRevRegRecord( + revoc_reg_id=REV_REG_ID_2, + tails_local_path=TAILS_LOCAL, + pending_pub=["99"], + ), + ] + with mock.patch.object( + test_module.IssuerRevRegRecord, + "query_by_pending", + mock.CoroutineMock(return_value=mock_issuer_rev_reg_records), + ) as record: + result = await self.manager.clear_pending_revocations( + {REV_REG_ID: ["1"], REV_REG_ID_2: ["99"]} + ) + assert result[REV_REG_ID] == ["2"] + assert result[REV_REG_ID_2] == [] async def test_retrieve_records(self): session = await self.profile.session() diff --git a/aries_cloudagent/revocation/tests/test_routes.py b/aries_cloudagent/revocation/tests/test_routes.py index 71a9841d16..95e4eab0b5 100644 --- a/aries_cloudagent/revocation/tests/test_routes.py +++ b/aries_cloudagent/revocation/tests/test_routes.py @@ -17,7 +17,11 @@ class TestRevocationRoutes(IsolatedAsyncioTestCase): def setUp(self): - self.profile = InMemoryProfile.test_profile() + self.profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) self.context = self.profile.context setattr(self.context, "profile", self.profile) self.request_dict = { @@ -29,11 +33,16 @@ def setUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) self.test_did = "sample-did" - self.author_profile = InMemoryProfile.test_profile() + self.author_profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "author-key", + } + ) 
self.author_profile.settings.set_value("endorser.author", True) self.author_context = self.author_profile.context setattr(self.author_context, "profile", self.author_profile) @@ -46,6 +55,7 @@ def setUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.author_request_dict[k], + headers={"x-api-key": "author-key"}, ) async def test_validate_cred_rev_rec_qs_and_revoke_req(self): @@ -1054,7 +1064,7 @@ async def test_set_rev_reg_state_not_found(self): async def test_wrong_profile_403(self): self.profile = InMemoryProfile.test_profile( - settings={"wallet.type": "askar"}, + settings={"wallet.type": "askar", "admin.admin_api_key": "secret-key"}, profile_class=AskarAnoncredsProfile, ) self.context = AdminRequestContext.test_context({}, self.profile) @@ -1067,6 +1077,7 @@ async def test_wrong_profile_403(self): query={}, __getitem__=lambda _, k: self.request_dict[k], context=self.context, + headers={"x-api-key": "secret-key"}, ) self.request.json = mock.CoroutineMock( diff --git a/aries_cloudagent/revocation_anoncreds/routes.py b/aries_cloudagent/revocation_anoncreds/routes.py index 99b66f1bd2..e6cf3ec7e7 100644 --- a/aries_cloudagent/revocation_anoncreds/routes.py +++ b/aries_cloudagent/revocation_anoncreds/routes.py @@ -2,7 +2,6 @@ import json import logging -import uuid from aiohttp import web from aiohttp_apispec import ( @@ -14,7 +13,9 @@ ) from marshmallow import fields, validate, validates_schema from marshmallow.exceptions import ValidationError +from uuid_utils import uuid4 +from ..admin.decorators.auth import tenant_authentication from ..admin.request_context import AdminRequestContext from ..anoncreds.base import ( AnonCredsObjectNotFound, @@ -459,6 +460,7 @@ def validate_fields(self, data, **kwargs): ) @request_schema(RevokeRequestSchemaAnoncreds()) @response_schema(RevocationAnoncredsModuleResponseSchema(), description="") +@tenant_authentication async def revoke(request: web.BaseRequest): """Request handler for storing a credential revocation. @@ -512,6 +514,7 @@ async def revoke(request: web.BaseRequest): @docs(tags=[TAG_TITLE], summary="Publish pending revocations to ledger") @request_schema(PublishRevocationsSchemaAnoncreds()) @response_schema(PublishRevocationsResultSchemaAnoncreds(), 200, description="") +@tenant_authentication async def publish_revocations(request: web.BaseRequest): """Request handler for publishing pending revocations to the ledger. @@ -551,6 +554,7 @@ async def publish_revocations(request: web.BaseRequest): ) @querystring_schema(RevRegsCreatedQueryStringSchema()) @response_schema(RevRegsCreatedSchemaAnoncreds(), 200, description="") +@tenant_authentication async def get_rev_regs(request: web.BaseRequest): """Request handler to get revocation registries that current agent created. @@ -589,6 +593,7 @@ async def get_rev_regs(request: web.BaseRequest): ) @match_info_schema(RevRegIdMatchInfoSchema()) @response_schema(RevRegResultSchemaAnoncreds(), 200, description="") +@tenant_authentication async def get_rev_reg(request: web.BaseRequest): """Request handler to get a revocation registry by rev reg id. 
@@ -631,7 +636,7 @@ async def _get_issuer_rev_reg_record( # transform result = IssuerRevRegRecord( - record_id=uuid.uuid4(), + record_id=uuid4(), state=state, cred_def_id=rev_reg_def.cred_def_id, error_msg=None, @@ -663,6 +668,7 @@ async def _get_issuer_rev_reg_record( ) @match_info_schema(RevocationCredDefIdMatchInfoSchema()) @response_schema(RevRegResultSchemaAnoncreds(), 200, description="") +@tenant_authentication async def get_active_rev_reg(request: web.BaseRequest): """Request handler to get current active revocation registry by cred def id. @@ -692,6 +698,7 @@ async def get_active_rev_reg(request: web.BaseRequest): @docs(tags=[TAG_TITLE], summary="Rotate revocation registry") @match_info_schema(RevocationCredDefIdMatchInfoSchema()) @response_schema(RevRegsCreatedSchemaAnoncreds(), 200, description="") +@tenant_authentication async def rotate_rev_reg(request: web.BaseRequest): """Request handler to rotate the active revocation registries for cred. def. @@ -724,6 +731,7 @@ async def rotate_rev_reg(request: web.BaseRequest): ) @match_info_schema(RevRegIdMatchInfoSchema()) @response_schema(RevRegIssuedResultSchemaAnoncreds(), 200, description="") +@tenant_authentication async def get_rev_reg_issued_count(request: web.BaseRequest): """Request handler to get number of credentials issued against revocation registry. @@ -764,6 +772,7 @@ async def get_rev_reg_issued_count(request: web.BaseRequest): ) @match_info_schema(RevRegIdMatchInfoSchema()) @response_schema(CredRevRecordDetailsResultSchemaAnoncreds(), 200, description="") +@tenant_authentication async def get_rev_reg_issued(request: web.BaseRequest): """Request handler to get credentials issued against revocation registry. @@ -805,6 +814,7 @@ async def get_rev_reg_issued(request: web.BaseRequest): ) @match_info_schema(RevRegIdMatchInfoSchema()) @response_schema(CredRevIndyRecordsResultSchemaAnoncreds(), 200, description="") +@tenant_authentication async def get_rev_reg_indy_recs(request: web.BaseRequest): """Request handler to get details of revoked credentials from ledger. @@ -850,6 +860,7 @@ async def get_rev_reg_indy_recs(request: web.BaseRequest): @match_info_schema(RevRegIdMatchInfoSchema()) @querystring_schema(RevRegUpdateRequestMatchInfoSchema()) @response_schema(RevRegWalletUpdatedResultSchemaAnoncreds(), 200, description="") +@tenant_authentication async def update_rev_reg_revoked_state(request: web.BaseRequest): """Request handler to fix ledger entry of credentials revoked against registry. @@ -945,6 +956,7 @@ async def update_rev_reg_revoked_state(request: web.BaseRequest): ) @querystring_schema(CredRevRecordQueryStringSchema()) @response_schema(CredRevRecordResultSchemaAnoncreds(), 200, description="") +@tenant_authentication async def get_cred_rev_record(request: web.BaseRequest): """Request handler to get credential revocation record. @@ -987,6 +999,7 @@ async def get_cred_rev_record(request: web.BaseRequest): ) @match_info_schema(RevRegIdMatchInfoSchema()) @response_schema(RevocationAnoncredsModuleResponseSchema, description="tails file") +@tenant_authentication async def get_tails_file(request: web.BaseRequest) -> web.FileResponse: """Request handler to download tails file for revocation registry. 
@@ -1025,6 +1038,7 @@ async def get_tails_file(request: web.BaseRequest) -> web.FileResponse: @match_info_schema(RevRegIdMatchInfoSchema()) @querystring_schema(SetRevRegStateQueryStringSchema()) @response_schema(RevRegResultSchemaAnoncreds(), 200, description="") +@tenant_authentication async def set_rev_reg_state(request: web.BaseRequest): """Request handler to set a revocation registry state manually. diff --git a/aries_cloudagent/revocation_anoncreds/tests/test_routes.py b/aries_cloudagent/revocation_anoncreds/tests/test_routes.py index 2198e7668b..5988b35464 100644 --- a/aries_cloudagent/revocation_anoncreds/tests/test_routes.py +++ b/aries_cloudagent/revocation_anoncreds/tests/test_routes.py @@ -8,10 +8,7 @@ from aries_cloudagent.tests import mock from ...admin.request_context import AdminRequestContext -from ...anoncreds.models.anoncreds_revocation import ( - RevRegDef, - RevRegDefValue, -) +from ...anoncreds.models.anoncreds_revocation import RevRegDef, RevRegDefValue from ...askar.profile import AskarProfile from ...askar.profile_anon import AskarAnoncredsProfile from ...core.in_memory import InMemoryProfile @@ -20,7 +17,10 @@ class TestRevocationRoutes(IsolatedAsyncioTestCase): def setUp(self): - self.profile = InMemoryProfile.test_profile(profile_class=AskarAnoncredsProfile) + self.profile = InMemoryProfile.test_profile( + settings={"admin.admin_api_key": "secret-key"}, + profile_class=AskarAnoncredsProfile, + ) self.context = self.profile.context setattr(self.context, "profile", self.profile) self.request_dict = { @@ -32,6 +32,7 @@ def setUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) self.test_did = "sample-did" @@ -198,7 +199,7 @@ async def test_get_rev_reg(self): with mock.patch.object( test_module, "AnonCredsRevocation", autospec=True ) as mock_anon_creds_revoc, mock.patch.object( - test_module.uuid, "uuid4", mock.Mock() + test_module, "uuid4", mock.Mock() ) as mock_uuid, mock.patch.object( test_module.web, "json_response", mock.Mock() ) as mock_json_response: @@ -445,7 +446,7 @@ async def test_set_rev_reg_state(self): with mock.patch.object( test_module, "AnonCredsRevocation", autospec=True ) as mock_anon_creds_revoc, mock.patch.object( - test_module.uuid, "uuid4", mock.Mock() + test_module, "uuid4", mock.Mock() ) as mock_uuid, mock.patch.object( test_module.web, "json_response", mock.Mock() ) as mock_json_response: @@ -524,7 +525,7 @@ async def test_set_rev_reg_state_not_found(self): async def test_wrong_profile_403(self): self.profile = InMemoryProfile.test_profile( - settings={"wallet.type": "askar"}, + settings={"wallet.type": "askar", "admin.admin_api_key": "secret-key"}, profile_class=AskarProfile, ) self.context = AdminRequestContext.test_context({}, self.profile) @@ -537,6 +538,7 @@ async def test_wrong_profile_403(self): query={}, __getitem__=lambda _, k: self.request_dict[k], context=self.context, + headers={"x-api-key": "secret-key"}, ) self.request.json = mock.CoroutineMock( diff --git a/aries_cloudagent/settings/tests/test_routes.py b/aries_cloudagent/settings/tests/test_routes.py index 4c103bf2ba..3b2e3eb76b 100644 --- a/aries_cloudagent/settings/tests/test_routes.py +++ b/aries_cloudagent/settings/tests/test_routes.py @@ -3,13 +3,13 @@ # pylint: disable=redefined-outer-name import pytest + from aries_cloudagent.tests import mock from ...admin.request_context import AdminRequestContext from ...core.in_memory import InMemoryProfile from ...multitenant.base import BaseMultitenantManager 
from ...multitenant.manager import MultitenantManager - from .. import routes as test_module @@ -24,7 +24,11 @@ def mock_response(): @pytest.mark.asyncio async def test_get_profile_settings(mock_response): - profile = InMemoryProfile.test_profile() + profile = InMemoryProfile.test_profile( + settings={ + "admin.admin_api_key": "secret-key", + } + ) profile.settings.update( { "admin.admin_client_max_request_size": 1, @@ -45,6 +49,7 @@ async def test_get_profile_settings(mock_response): query={}, json=mock.CoroutineMock(return_value={}), __getitem__=lambda _, k: request_dict[k], + headers={"x-api-key": "secret-key"}, ) await test_module.get_profile_settings(request) assert mock_response.call_args[0][0] == { diff --git a/aries_cloudagent/storage/indy.py b/aries_cloudagent/storage/indy.py deleted file mode 100644 index 16bfab6624..0000000000 --- a/aries_cloudagent/storage/indy.py +++ /dev/null @@ -1,334 +0,0 @@ -"""Indy implementation of BaseStorage interface.""" - -import asyncio -import json -import logging -from typing import Mapping, Sequence - -from indy import non_secrets -from indy.error import IndyError, ErrorCode - -from .base import ( - DEFAULT_PAGE_SIZE, - BaseStorage, - BaseStorageSearch, - BaseStorageSearchSession, - validate_record, -) -from .error import ( - StorageError, - StorageDuplicateError, - StorageNotFoundError, - StorageSearchError, -) -from .record import StorageRecord -from ..indy.sdk.wallet_setup import IndyOpenWallet - -LOGGER = logging.getLogger(__name__) - - -class IndySdkStorage(BaseStorage, BaseStorageSearch): - """Indy Non-Secrets interface.""" - - def __init__(self, wallet: IndyOpenWallet): - """Initialize an `IndySdkStorage` instance. - - Args: - wallet: The indy wallet instance to use - - """ - self._wallet = wallet - - @property - def wallet(self) -> IndyOpenWallet: - """Accessor for IndyOpenWallet instance.""" - return self._wallet - - async def add_record(self, record: StorageRecord): - """Add a new record to the store. - - Args: - record: `StorageRecord` to be stored - - """ - validate_record(record) - tags_json = json.dumps(record.tags) if record.tags else None - try: - await non_secrets.add_wallet_record( - self._wallet.handle, record.type, record.id, record.value, tags_json - ) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.WalletItemAlreadyExists: - raise StorageDuplicateError( - "Duplicate record ID: {}".format(record.id) - ) from x_indy - raise StorageError(str(x_indy)) from x_indy - - async def get_record( - self, record_type: str, record_id: str, options: Mapping = None - ) -> StorageRecord: - """Fetch a record from the store by type and ID. 
- - Args: - record_type: The record type - record_id: The record id - options: A dictionary of backend-specific options - - Returns: - A `StorageRecord` instance - - Raises: - StorageError: If the record is not provided - StorageError: If the record ID not provided - StorageNotFoundError: If the record is not found - StorageError: If record not found - - """ - if not record_type: - raise StorageError("Record type not provided") - if not record_id: - raise StorageError("Record ID not provided") - if not options: - options = {} - options_json = json.dumps( - { - "retrieveType": False, - "retrieveValue": True, - "retrieveTags": options.get("retrieveTags", True), - } - ) - try: - result_json = await non_secrets.get_wallet_record( - self._wallet.handle, record_type, record_id, options_json - ) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.WalletItemNotFound: - raise StorageNotFoundError( - f"{record_type} record not found: {record_id}" - ) from x_indy - raise StorageError(str(x_indy)) from x_indy - result = json.loads(result_json) - return StorageRecord( - type=record_type, - id=result["id"], - value=result["value"], - tags=result["tags"] or {}, - ) - - async def update_record(self, record: StorageRecord, value: str, tags: Mapping): - """Update an existing stored record's value and tags. - - Args: - record: `StorageRecord` to update - value: The new value - tags: The new tags - - Raises: - StorageNotFoundError: If record not found - StorageError: If a libindy error occurs - - """ - validate_record(record) - tags_json = json.dumps(tags) if tags else "{}" - try: - await non_secrets.update_wallet_record_value( - self._wallet.handle, record.type, record.id, value - ) - await non_secrets.update_wallet_record_tags( - self._wallet.handle, record.type, record.id, tags_json - ) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.WalletItemNotFound: - raise StorageNotFoundError(f"Record not found: {record.id}") - raise StorageError(str(x_indy)) - - async def delete_record(self, record: StorageRecord): - """Delete a record. - - Args: - record: `StorageRecord` to delete - - Raises: - StorageNotFoundError: If record not found - StorageError: If a libindy error occurs - - """ - validate_record(record, delete=True) - try: - await non_secrets.delete_wallet_record( - self._wallet.handle, record.type, record.id - ) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.WalletItemNotFound: - raise StorageNotFoundError(f"Record not found: {record.id}") - raise StorageError(str(x_indy)) - - async def find_all_records( - self, - type_filter: str, - tag_query: Mapping = None, - options: Mapping = None, - ): - """Retrieve all records matching a particular type filter and tag query.""" - results = [] - search = self.search_records(type_filter, tag_query, options=options) - while True: - buf = await search.fetch() - if buf: - results.extend(buf) - else: - break - return results - - async def delete_all_records( - self, - type_filter: str, - tag_query: Mapping = None, - ): - """Remove all records matching a particular type filter and tag query.""" - async for row in self.search_records( - type_filter, tag_query, options={"retrieveTags": False} - ): - await self.delete_record(row) - - def search_records( - self, - type_filter: str, - tag_query: Mapping = None, - page_size: int = None, - options: Mapping = None, - ) -> "IndySdkStorageSearch": - """Search stored records. 
- - Args: - type_filter: Filter string - tag_query: Tags to query - page_size: Page size - options: Dictionary of backend-specific options - - Returns: - An instance of `IndySdkStorageSearch` - - """ - return IndySdkStorageSearch(self, type_filter, tag_query, page_size, options) - - -class IndySdkStorageSearch(BaseStorageSearchSession): - """Represent an active stored records search.""" - - def __init__( - self, - store: IndySdkStorage, - type_filter: str, - tag_query: Mapping, - page_size: int = None, - options: Mapping = None, - ): - """Initialize a `IndySdkStorageSearch` instance. - - Args: - store: `BaseStorage` to search - type_filter: Filter string - tag_query: Tags to search - page_size: Size of page to return - - """ - self._handle = None - self._done = False - self.store = store - self.options = options or {} - self.page_size = page_size or DEFAULT_PAGE_SIZE - self.tag_query = tag_query - self.type_filter = type_filter - - async def fetch(self, max_count: int = None) -> Sequence[StorageRecord]: - """Fetch the next list of results from the store. - - Args: - max_count: Max number of records to return. If not provided, - defaults to the backend's preferred page size - - Returns: - A list of `StorageRecord` instances - - Raises: - StorageSearchError: If the search query has not been opened - - """ - if self._done: - raise StorageSearchError("Search query is complete") - await self._open() - - try: - result_json = await non_secrets.fetch_wallet_search_next_records( - self.store.wallet.handle, self._handle, max_count or self.page_size - ) - except IndyError as x_indy: - raise StorageSearchError(str(x_indy)) from x_indy - - results = json.loads(result_json) - ret = [] - if results["records"]: - for row in results["records"]: - ret.append( - StorageRecord( - type=self.type_filter, - id=row["id"], - value=row["value"], - tags=row["tags"], - ) - ) - - if not ret: - await self.close() - - return ret - - async def _open(self): - """Start the search query.""" - if self._handle: - return - - query_json = json.dumps(self.tag_query or {}) - options_json = json.dumps( - { - "retrieveRecords": True, - "retrieveTotalCount": False, - "retrieveType": False, - "retrieveValue": True, - "retrieveTags": self.options.get("retrieveTags", True), - } - ) - try: - self._handle = await non_secrets.open_wallet_search( - self.store.wallet.handle, self.type_filter, query_json, options_json - ) - except IndyError as x_indy: - raise StorageSearchError(str(x_indy)) from x_indy - - async def close(self): - """Dispose of the search query.""" - try: - if self._handle: - await non_secrets.close_wallet_search(self._handle) - self._handle = None - self.store = None - self._done = True - except IndyError as x_indy: - raise StorageSearchError(str(x_indy)) from x_indy - - def __del__(self): - """Ensure the search is closed.""" - if self._handle: - - async def cleanup(handle): - LOGGER.warning("Indy wallet search was not closed manually") - try: - await non_secrets.close_wallet_search(handle) - except Exception: - LOGGER.exception("Exception when auto-closing Indy wallet search") - - loop = asyncio.get_event_loop() - task = loop.create_task(cleanup(self._handle)) - if not loop.is_running(): - loop.run_until_complete(task) diff --git a/aries_cloudagent/storage/record.py b/aries_cloudagent/storage/record.py index 36266db7af..4290f96952 100644 --- a/aries_cloudagent/storage/record.py +++ b/aries_cloudagent/storage/record.py @@ -2,7 +2,8 @@ from collections import namedtuple from typing import Optional -from uuid import uuid4 + 
+from uuid_utils import uuid4 class StorageRecord(namedtuple("StorageRecord", "type value tags id")): diff --git a/aries_cloudagent/storage/tests/test_indy_storage.py b/aries_cloudagent/storage/tests/test_indy_storage.py deleted file mode 100644 index 77e4c8f3e2..0000000000 --- a/aries_cloudagent/storage/tests/test_indy_storage.py +++ /dev/null @@ -1,502 +0,0 @@ -import asyncio -import json -import pytest -import os - -import indy.anoncreds -import indy.crypto -import indy.did -import indy.wallet - -from indy.error import ErrorCode -from aries_cloudagent.tests import mock - -from ...config.injection_context import InjectionContext -from ...indy.sdk.profile import IndySdkProfileManager, IndySdkProfile -from ...storage.base import BaseStorage -from ...storage.error import StorageError, StorageSearchError -from ...storage.indy import IndySdkStorage -from ...storage.record import StorageRecord -from ...wallet.indy import IndySdkWallet -from ...ledger.indy import IndySdkLedgerPool - -from .. import indy as test_module -from . import test_in_memory_storage - - -async def make_profile(): - key = await IndySdkWallet.generate_wallet_key() - context = InjectionContext() - context.injector.bind_instance(IndySdkLedgerPool, IndySdkLedgerPool("name")) - with mock.patch.object(IndySdkProfile, "_make_finalizer"): - return await IndySdkProfileManager().provision( - context, - { - "auto_recreate": True, - "auto_remove": True, - "name": "test-wallet", - "key": key, - "key_derivation_method": "RAW", # much slower tests with argon-hashed keys - }, - ) - - -@pytest.fixture() -async def store(): - profile = await make_profile() - async with profile.session() as session: - yield session.inject(BaseStorage) - await profile.close() - - -@pytest.fixture() -async def store_search(): - profile = await make_profile() - async with profile.session() as session: - yield session.inject(BaseStorage) - await profile.close() - - -@pytest.mark.indy -class TestIndySdkStorage(test_in_memory_storage.TestInMemoryStorage): - """Tests for indy storage.""" - - @pytest.mark.asyncio - async def test_record(self): - with mock.patch( - "aries_cloudagent.indy.sdk.wallet_plugin.load_postgres_plugin", - mock.MagicMock(), - ) as mock_load, mock.patch.object( - indy.wallet, "create_wallet", mock.CoroutineMock() - ) as mock_create, mock.patch.object( - indy.wallet, "open_wallet", mock.CoroutineMock() - ) as mock_open, mock.patch.object( - indy.anoncreds, "prover_create_master_secret", mock.CoroutineMock() - ) as mock_master, mock.patch.object( - indy.wallet, "close_wallet", mock.CoroutineMock() - ) as mock_close, mock.patch.object( - indy.wallet, "delete_wallet", mock.CoroutineMock() - ) as mock_delete, mock.patch.object( - IndySdkProfile, "_make_finalizer" - ): - config = { - "auto_recreate": True, - "auto_remove": True, - "name": "test-wallet", - "key": await IndySdkWallet.generate_wallet_key(), - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": json.dumps({"url": "dummy"}), - "storage_creds": json.dumps( - { - "account": "postgres", - "password": "mysecretpassword", - "admin_account": "postgres", - "admin_password": "mysecretpassword", - } - ), - } - context = InjectionContext() - context.injector.bind_instance(IndySdkLedgerPool, IndySdkLedgerPool("name")) - fake_profile = await IndySdkProfileManager().provision(context, config) - opened = await IndySdkProfileManager().open(context, config) # cover open() - await opened.close() - - session = await fake_profile.session() - storage = 
session.inject(BaseStorage) - - for record_x in [ - None, - StorageRecord( - type="connection", - value=json.dumps( - { - "initiator": "self", - "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", - "state": "invitation", - "routing_state": "none", - "error_msg": None, - "their_label": None, - "created_at": "2019-05-14 21:58:24.143260+00:00", - "updated_at": "2019-05-14 21:58:24.143260+00:00", - } - ), - tags={ - "initiator": "self", - "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", - "state": "invitation", - "routing_state": "none", - }, - id=None, - ), - StorageRecord( - type=None, - value=json.dumps( - { - "initiator": "self", - "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", - "state": "invitation", - "routing_state": "none", - "error_msg": None, - "their_label": None, - "created_at": "2019-05-14 21:58:24.143260+00:00", - "updated_at": "2019-05-14 21:58:24.143260+00:00", - } - ), - tags={ - "initiator": "self", - "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", - "state": "invitation", - "routing_state": "none", - }, - id="f96f76ec-0e9b-4f32-8237-f4219e6cf0c7", - ), - StorageRecord( - type="connection", - value=None, - tags={ - "initiator": "self", - "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", - "state": "invitation", - "routing_state": "none", - }, - id="f96f76ec-0e9b-4f32-8237-f4219e6cf0c7", - ), - ]: - with pytest.raises(StorageError): - await storage.add_record(record_x) - - with pytest.raises(StorageError): - await storage.get_record(None, "dummy-id") - with pytest.raises(StorageError): - await storage.get_record("connection", None) - - with mock.patch.object( - indy.non_secrets, "get_wallet_record", mock.CoroutineMock() - ) as mock_get_record: - mock_get_record.side_effect = test_module.IndyError( - ErrorCode.CommonInvalidStructure - ) - with pytest.raises(test_module.StorageError): - await storage.get_record("connection", "dummy-id") - - with mock.patch.object( - indy.non_secrets, - "update_wallet_record_value", - mock.CoroutineMock(), - ) as mock_update_value, mock.patch.object( - indy.non_secrets, - "update_wallet_record_tags", - mock.CoroutineMock(), - ) as mock_update_tags, mock.patch.object( - indy.non_secrets, - "delete_wallet_record", - mock.CoroutineMock(), - ) as mock_delete: - mock_update_value.side_effect = test_module.IndyError( - ErrorCode.CommonInvalidStructure - ) - mock_update_tags.side_effect = test_module.IndyError( - ErrorCode.CommonInvalidStructure - ) - mock_delete.side_effect = test_module.IndyError( - ErrorCode.CommonInvalidStructure - ) - - rec = StorageRecord( - type="connection", - value=json.dumps( - { - "initiator": "self", - "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", - "state": "invitation", - "routing_state": "none", - "error_msg": None, - "their_label": None, - "created_at": "2019-05-14 21:58:24.143260+00:00", - "updated_at": "2019-05-14 21:58:24.143260+00:00", - } - ), - tags={ - "initiator": "self", - "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", - "state": "invitation", - "routing_state": "none", - }, - id="f96f76ec-0e9b-4f32-8237-f4219e6cf0c7", - ) - - with pytest.raises(test_module.StorageError): - await storage.update_record(rec, "dummy-value", {"tag": "tag"}) - - with pytest.raises(test_module.StorageError): - await storage.delete_record(rec) - - @pytest.mark.asyncio - async def test_storage_search_x(self): - with mock.patch( - "aries_cloudagent.indy.sdk.wallet_plugin.load_postgres_plugin", - mock.MagicMock(), - 
) as mock_load, mock.patch.object( - indy.wallet, "create_wallet", mock.CoroutineMock() - ) as mock_create, mock.patch.object( - indy.wallet, "open_wallet", mock.CoroutineMock() - ) as mock_open, mock.patch.object( - indy.anoncreds, "prover_create_master_secret", mock.CoroutineMock() - ) as mock_master, mock.patch.object( - indy.wallet, "close_wallet", mock.CoroutineMock() - ) as mock_close, mock.patch.object( - indy.wallet, "delete_wallet", mock.CoroutineMock() - ) as mock_delete, mock.patch.object( - IndySdkProfile, "_make_finalizer" - ): - context = InjectionContext() - context.injector.bind_instance(IndySdkLedgerPool, IndySdkLedgerPool("name")) - fake_profile = await IndySdkProfileManager().provision( - context, - { - "auto_recreate": True, - "auto_remove": True, - "name": "test_pg_wallet", - "key": await IndySdkWallet.generate_wallet_key(), - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": json.dumps({"url": "dummy"}), - "storage_creds": json.dumps( - { - "account": "postgres", - "password": "mysecretpassword", - "admin_account": "postgres", - "admin_password": "mysecretpassword", - } - ), - }, - ) - session = await fake_profile.session() - storage = session.inject(BaseStorage) - - search = storage.search_records("connection") - with pytest.raises(StorageSearchError): - await search.fetch(10) - - with mock.patch.object( - indy.non_secrets, "open_wallet_search", mock.CoroutineMock() - ) as mock_indy_open_search, mock.patch.object( - indy.non_secrets, "close_wallet_search", mock.CoroutineMock() - ) as mock_indy_close_search: - mock_indy_open_search.side_effect = test_module.IndyError("no open") - search = storage.search_records("connection") - with pytest.raises(StorageSearchError): - await search.fetch() - await search.close() - - with mock.patch.object( - indy.non_secrets, "open_wallet_search", mock.CoroutineMock() - ) as mock_indy_open_search, mock.patch.object( - indy.non_secrets, - "fetch_wallet_search_next_records", - mock.CoroutineMock(), - ) as mock_indy_fetch, mock.patch.object( - indy.non_secrets, "close_wallet_search", mock.CoroutineMock() - ) as mock_indy_close_search: - mock_indy_fetch.side_effect = test_module.IndyError("no fetch") - search = storage.search_records("connection") - with pytest.raises(StorageSearchError): - await search.fetch(10) - await search.close() - - with mock.patch.object( - indy.non_secrets, "open_wallet_search", mock.CoroutineMock() - ) as mock_indy_open_search, mock.patch.object( - indy.non_secrets, "close_wallet_search", mock.CoroutineMock() - ) as mock_indy_close_search: - mock_indy_close_search.side_effect = test_module.IndyError("no close") - search = storage.search_records("connection") - with pytest.raises(StorageSearchError): - await search.fetch() - - @pytest.mark.asyncio - async def test_storage_del_close(self): - with mock.patch.object( - indy.wallet, "create_wallet", mock.CoroutineMock() - ) as mock_create, mock.patch.object( - indy.wallet, "open_wallet", mock.CoroutineMock() - ) as mock_open, mock.patch.object( - indy.anoncreds, "prover_create_master_secret", mock.CoroutineMock() - ) as mock_master, mock.patch.object( - indy.wallet, "close_wallet", mock.CoroutineMock() - ) as mock_close, mock.patch.object( - indy.wallet, "delete_wallet", mock.CoroutineMock() - ) as mock_delete, mock.patch.object( - IndySdkProfile, "_make_finalizer" - ): - context = InjectionContext() - context.injector.bind_instance(IndySdkLedgerPool, IndySdkLedgerPool("name")) - fake_profile = await 
IndySdkProfileManager().provision( - context, - { - "auto_recreate": True, - "auto_remove": True, - "name": "test_indy_wallet", - "key": await IndySdkWallet.generate_wallet_key(), - "key_derivation_method": "RAW", - }, - ) - session = await fake_profile.session() - storage = session.inject(BaseStorage) - - with mock.patch.object( - indy.non_secrets, "open_wallet_search", mock.CoroutineMock() - ) as mock_indy_open_search, mock.patch.object( - indy.non_secrets, "close_wallet_search", mock.CoroutineMock() - ) as mock_indy_close_search: - mock_indy_open_search.return_value = 1 - search = storage.search_records("connection") - mock_indy_open_search.assert_not_awaited() - await search._open() - mock_indy_open_search.assert_awaited_once() - del search - c = 0 - # give the pending cleanup task time to be scheduled - while not mock_indy_close_search.await_count and c < 10: - await asyncio.sleep(0.1) - c += 1 - mock_indy_close_search.assert_awaited_with(1) - - with mock.patch.object( # error on close - indy.non_secrets, "open_wallet_search", mock.CoroutineMock() - ) as mock_indy_open_search, mock.patch.object( - indy.non_secrets, "close_wallet_search", mock.CoroutineMock() - ) as mock_indy_close_search: - mock_indy_close_search.side_effect = test_module.IndyError("no close") - mock_indy_open_search.return_value = 1 - search = storage.search_records("connection") - await search._open() - with pytest.raises(StorageSearchError): - await search.close() - - with mock.patch.object( # run on event loop until complete - indy.non_secrets, "open_wallet_search", mock.CoroutineMock() - ) as mock_indy_open_search, mock.patch.object( - indy.non_secrets, "close_wallet_search", mock.CoroutineMock() - ) as mock_indy_close_search, mock.patch.object( - asyncio, "get_event_loop", mock.MagicMock() - ) as mock_get_event_loop: - coros = [] - mock_get_event_loop.return_value = mock.MagicMock( - create_task=lambda c: coros.append(c), - is_running=mock.MagicMock(return_value=False), - run_until_complete=mock.MagicMock(), - ) - mock_indy_open_search.return_value = 1 - search = storage.search_records("connection") - await search._open() - del search - assert ( - coros - and len(coros) - == mock_get_event_loop.return_value.run_until_complete.call_count - ) - # now run the cleanup task - for coro in coros: - await coro - - with mock.patch.object( # run on event loop until complete - indy.non_secrets, "open_wallet_search", mock.CoroutineMock() - ) as mock_indy_open_search, mock.patch.object( - indy.non_secrets, "close_wallet_search", mock.CoroutineMock() - ) as mock_indy_close_search, mock.patch.object( - asyncio, "get_event_loop", mock.MagicMock() - ) as mock_get_event_loop: - coros = [] - mock_get_event_loop.return_value = mock.MagicMock( - create_task=lambda c: coros.append(c), - is_running=mock.MagicMock(return_value=False), - run_until_complete=mock.MagicMock(), - ) - mock_indy_open_search.return_value = 1 - mock_indy_close_search.side_effect = ValueError("Dave's not here") - search = storage.search_records("connection") - await search._open() - del search - assert ( - coros - and len(coros) - == mock_get_event_loop.return_value.run_until_complete.call_count - ) - # now run the cleanup task - for coro in coros: - await coro - - # TODO get these to run in docker ci/cd - @pytest.mark.asyncio - @pytest.mark.postgres - async def test_postgres_wallet_storage_works(self): - """ - Ensure that postgres wallet operations work (create and open wallet, store and search, drop wallet) - """ - postgres_url = os.environ.get("POSTGRES_URL") 
- if not postgres_url: - pytest.fail("POSTGRES_URL not configured") - - wallet_key = await IndySdkWallet.generate_wallet_key() - postgres_wallet = IndySdkWallet( - { - "auto_recreate": True, - "auto_remove": True, - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": '{"url":"' + postgres_url + '", "max_connections":5}', - "storage_creds": '{"account":"postgres","password":"mysecretpassword","admin_account":"postgres","admin_password":"mysecretpassword"}', - } - ) - await postgres_wallet.create() - await postgres_wallet.open() - - storage = IndySdkStorage(postgres_wallet) - - # add and then fetch a record - record = StorageRecord( - type="connection", - value=json.dumps( - { - "initiator": "self", - "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", - "state": "invitation", - "routing_state": "none", - "error_msg": None, - "their_label": None, - "created_at": "2019-05-14 21:58:24.143260+00:00", - "updated_at": "2019-05-14 21:58:24.143260+00:00", - } - ), - tags={ - "initiator": "self", - "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", - "state": "invitation", - "routing_state": "none", - }, - id="f96f76ec-0e9b-4f32-8237-f4219e6cf0c7", - ) - await storage.add_record(record) - g_rec = await storage.get_record(record.type, record.id) - - # now try search - search = None - try: - search = storage.search_records("connection") - await search.open() - records = await search.fetch(10) - finally: - if search: - await search.close() - - await postgres_wallet.close() - await postgres_wallet.remove() - - -@pytest.mark.indy -class TestIndySdkStorageSearch(test_in_memory_storage.TestInMemoryStorageSearch): - pass diff --git a/aries_cloudagent/storage/type.py b/aries_cloudagent/storage/type.py index 7a0cc9aab7..ea4279377f 100644 --- a/aries_cloudagent/storage/type.py +++ b/aries_cloudagent/storage/type.py @@ -1,3 +1,7 @@ """Library version information.""" RECORD_TYPE_ACAPY_STORAGE_TYPE = "acapy_storage_type" +RECORD_TYPE_ACAPY_UPGRADING = "acapy_upgrading" + +STORAGE_TYPE_VALUE_ANONCREDS = "askar-anoncreds" +STORAGE_TYPE_VALUE_ASKAR = "askar" diff --git a/aries_cloudagent/storage/vc_holder/indy.py b/aries_cloudagent/storage/vc_holder/indy.py deleted file mode 100644 index 60a96aa1bb..0000000000 --- a/aries_cloudagent/storage/vc_holder/indy.py +++ /dev/null @@ -1,161 +0,0 @@ -"""Indy-SDK storage implementation of VC holder interface.""" - -from dateutil.parser import parse as dateutil_parser -from dateutil.parser import ParserError -from typing import Mapping, Sequence - -from ...indy.sdk.wallet_setup import IndyOpenWallet - -from ..indy import IndySdkStorage, IndySdkStorageSearch - -from .base import VCHolder, VCRecordSearch -from .vc_record import VCRecord -from .xform import storage_to_vc_record, vc_to_storage_record, VC_CRED_RECORD_TYPE - - -class IndySdkVCHolder(VCHolder): - """Indy-SDK storage class.""" - - def __init__(self, wallet: IndyOpenWallet): - """Initialize the Indy-SDK VC holder instance.""" - self._wallet = wallet - self._store = IndySdkStorage(wallet) - - def build_type_or_schema_query(self, uri_list: Sequence[str]) -> dict: - """Build and return indy-specific type_or_schema_query.""" - type_or_schema_query = {"$and": []} - for uri in uri_list: - tag_or_list = [] - tag_or_list.append({f"type:{uri}": "1"}) - tag_or_list.append({f"schm:{uri}": "1"}) - type_or_schema_query["$and"].append({"$or": tag_or_list}) - return type_or_schema_query - - async def 
store_credential(self, cred: VCRecord): - """Add a new VC record to the store. - - Args: - cred: The VCRecord instance to store - Raises: - StorageDuplicateError: If the record_id is not unique - - """ - record = vc_to_storage_record(cred) - await self._store.add_record(record) - - async def retrieve_credential_by_id(self, record_id: str) -> VCRecord: - """Fetch a VC record by its record ID. - - Raises: - StorageNotFoundError: If the record is not found - - """ - record = await self._store.get_record(VC_CRED_RECORD_TYPE, record_id) - return storage_to_vc_record(record) - - async def retrieve_credential_by_given_id(self, given_id: str) -> VCRecord: - """Fetch a VC record by its given ID ('id' property). - - Raises: - StorageNotFoundError: If the record is not found - - """ - record = await self._store.find_record( - VC_CRED_RECORD_TYPE, {"given_id": given_id} - ) - return storage_to_vc_record(record) - - async def delete_credential(self, cred: VCRecord): - """Remove a previously-stored VC record. - - Raises: - StorageNotFoundError: If the record is not found - - """ - await self._store.delete_record(vc_to_storage_record(cred)) - - def search_credentials( - self, - contexts: Sequence[str] = None, - types: Sequence[str] = None, - schema_ids: Sequence[str] = None, - issuer_id: str = None, - subject_ids: str = None, - proof_types: Sequence[str] = None, - given_id: str = None, - tag_query: Mapping = None, - pd_uri_list: Sequence[str] = None, - ) -> "VCRecordSearch": - """Start a new VC record search. - - Args: - contexts: An inclusive list of JSON-LD contexts to match - types: An inclusive list of JSON-LD types to match - schema_ids: An inclusive list of credential schema identifiers - issuer_id: The ID of the credential issuer - subject_ids: The IDs of credential subjects all of which to match - proof_types: The signature suite types used for the proof objects. - given_id: The given id of the credential - tag_query: A tag filter clause - - """ - query = {} - if contexts: - for ctx_val in contexts: - query[f"ctxt:{ctx_val}"] = "1" - if types: - for type_val in types: - query[f"type:{type_val}"] = "1" - if schema_ids: - for schema_val in schema_ids: - query[f"schm:{schema_val}"] = "1" - if subject_ids: - for subject_id in subject_ids: - query[f"subj:{subject_id}"] = "1" - if proof_types: - for proof_type in proof_types: - query[f"ptyp:{proof_type}"] = "1" - if issuer_id: - query["issuer_id"] = issuer_id - if given_id: - query["given_id"] = given_id - if tag_query: - query.update(tag_query) - if pd_uri_list: - query.update(self.build_type_or_schema_query(pd_uri_list)) - search = self._store.search_records(VC_CRED_RECORD_TYPE, query) - return IndySdkVCRecordSearch(search) - - -class IndySdkVCRecordSearch(VCRecordSearch): - """Indy-SDK storage search for VC records.""" - - def __init__(self, search: IndySdkStorageSearch): - """Initialize the Indy-SDK VC record search.""" - self._search = search - - async def close(self): - """Dispose of the search query.""" - await self._search.close() - - async def fetch(self, max_count: int = None) -> Sequence[VCRecord]: - """Fetch the next list of VC records from the store. - - Args: - max_count: Max number of records to return. 
If not provided, - defaults to the backend's preferred page size - - Returns: - A list of `VCRecord` instances - - """ - rows = await self._search.fetch(max_count) - records = [storage_to_vc_record(r) for r in rows] - try: - records.sort( - key=lambda v: dateutil_parser(v.cred_value.get("issuanceDate")), - reverse=True, - ) - return records - except ParserError: - return records diff --git a/aries_cloudagent/storage/vc_holder/tests/test_indy_vc_holder.py b/aries_cloudagent/storage/vc_holder/tests/test_indy_vc_holder.py deleted file mode 100644 index 3009e61b1e..0000000000 --- a/aries_cloudagent/storage/vc_holder/tests/test_indy_vc_holder.py +++ /dev/null @@ -1,44 +0,0 @@ -import pytest -from unittest import mock - - -from ....config.injection_context import InjectionContext -from ....indy.sdk.profile import IndySdkProfileManager, IndySdkProfile -from ....ledger.indy import IndySdkLedgerPool -from ....wallet.indy import IndySdkWallet - -from ..base import VCHolder - -from . import test_in_memory_vc_holder as in_memory - - -async def make_profile(): - key = await IndySdkWallet.generate_wallet_key() - context = InjectionContext() - context.injector.bind_instance(IndySdkLedgerPool, IndySdkLedgerPool("name")) - - with mock.patch.object(IndySdkProfile, "_make_finalizer"): - return await IndySdkProfileManager().provision( - context, - { - "auto_recreate": True, - "auto_remove": True, - "name": "test-wallet", - "key": key, - "key_derivation_method": "RAW", # much slower tests with argon-hashed keys - }, - ) - - -@pytest.fixture() -async def holder(): - profile = await make_profile() - async with profile.session() as session: - yield session.inject(VCHolder) - await profile.close() - - -@pytest.mark.indy -class TestIndySdkVCHolder(in_memory.TestInMemoryVCHolder): - # run same test suite with different holder fixture - pass diff --git a/aries_cloudagent/storage/vc_holder/vc_record.py b/aries_cloudagent/storage/vc_holder/vc_record.py index 697c676c47..327a7ee3c2 100644 --- a/aries_cloudagent/storage/vc_holder/vc_record.py +++ b/aries_cloudagent/storage/vc_holder/vc_record.py @@ -2,9 +2,9 @@ import logging from typing import Mapping, Sequence -from uuid import uuid4 from marshmallow import EXCLUDE, fields +from uuid_utils import uuid4 from ...messaging.models.base import BaseModel, BaseModelSchema from ...messaging.valid import ENDPOINT_EXAMPLE, ENDPOINT_VALIDATE, UUID4_EXAMPLE diff --git a/aries_cloudagent/tails/tests/test_indy.py b/aries_cloudagent/tails/tests/test_indy.py index 1bbdcbefe4..8e9eddb208 100644 --- a/aries_cloudagent/tails/tests/test_indy.py +++ b/aries_cloudagent/tails/tests/test_indy.py @@ -44,37 +44,6 @@ async def test_upload(self): text == context.settings["tails_server_upload_url"] + "/" + REV_REG_ID ) - async def test_upload_indy_sdk(self): - profile = InMemoryProfile.test_profile() - profile.settings["tails_server_upload_url"] = "http://1.2.3.4:8088" - profile.context.injector.bind_instance( - BaseMultipleLedgerManager, - mock.MagicMock( - get_write_ledgers=mock.CoroutineMock( - return_value=[ - "test_ledger_id_1", - "test_ledger_id_2", - ] - ) - ), - ) - profile.context.injector.bind_instance(BaseLedger, mock.MagicMock()) - indy_tails = test_module.IndyTailsServer() - - with mock.patch.object( - test_module, "put_file", mock.CoroutineMock() - ) as mock_put: - mock_put.return_value = "tails-hash" - (ok, text) = await indy_tails.upload_tails_file( - profile.context, - REV_REG_ID, - "/tmp/dummy/path", - ) - assert ok - assert ( - text == profile.settings["tails_server_upload_url"] 
+ "/" + REV_REG_ID - ) - async def test_upload_indy_vdr(self): profile = InMemoryProfile.test_profile() profile.settings["tails_server_upload_url"] = "http://1.2.3.4:8088" diff --git a/aries_cloudagent/transport/inbound/manager.py b/aries_cloudagent/transport/inbound/manager.py index 0969d1c477..74400e6714 100644 --- a/aries_cloudagent/transport/inbound/manager.py +++ b/aries_cloudagent/transport/inbound/manager.py @@ -1,17 +1,16 @@ """Inbound transport manager.""" import logging -import uuid from collections import OrderedDict from typing import Callable, Coroutine +from uuid_utils import uuid4 + from ...core.profile import Profile -from ...utils.classloader import ClassLoader, ModuleLoadError, ClassNotFoundError +from ...utils.classloader import ClassLoader, ClassNotFoundError, ModuleLoadError from ...utils.task_queue import CompletedTask, TaskQueue - from ..outbound.message import OutboundMessage from ..wire_format import BaseWireFormat - from .base import ( BaseInboundTransport, InboundTransportConfiguration, @@ -164,7 +163,7 @@ async def create_session( client_info=client_info, close_handler=self.closed_session, inbound_handler=self.receive_inbound, - session_id=str(uuid.uuid4()), + session_id=str(uuid4()), transport_type=transport_type, wire_format=wire_format, ) diff --git a/aries_cloudagent/utils/dependencies.py b/aries_cloudagent/utils/dependencies.py index 461c70664c..ec4c220871 100644 --- a/aries_cloudagent/utils/dependencies.py +++ b/aries_cloudagent/utils/dependencies.py @@ -3,27 +3,6 @@ import sys -def is_indy_sdk_module_installed(): - """Check whether indy (indy-sdk) module is installed. - - Returns: - bool: Whether indy (indy-sdk) is installed. - - """ - try: - # Check if already imported - if "indy" in sys.modules: - return True - - # Try to import - import indy # noqa: F401 - - return True - except ModuleNotFoundError: - # Not installed if import went wrong - return False - - def is_ursa_bbs_signatures_module_installed(): """Check whether ursa_bbs_signatures module is installed. 
diff --git a/aries_cloudagent/utils/general.py b/aries_cloudagent/utils/general.py new file mode 100644 index 0000000000..7c01793a07 --- /dev/null +++ b/aries_cloudagent/utils/general.py @@ -0,0 +1,10 @@ +"""Utility functions for the admin server.""" + +from hmac import compare_digest + + +def const_compare(string1, string2): + """Compare two strings in constant time.""" + if string1 is None or string2 is None: + return False + return compare_digest(string1.encode(), string2.encode()) diff --git a/aries_cloudagent/utils/profiles.py b/aries_cloudagent/utils/profiles.py index 45a440ed79..d5433f3afd 100644 --- a/aries_cloudagent/utils/profiles.py +++ b/aries_cloudagent/utils/profiles.py @@ -1,10 +1,15 @@ """Profile utilities.""" +import json + from aiohttp import web from ..anoncreds.error_messages import ANONCREDS_PROFILE_REQUIRED_MSG from ..askar.profile_anon import AskarAnoncredsProfile from ..core.profile import Profile +from ..multitenant.manager import MultitenantManager +from ..storage.base import BaseStorageSearch +from ..wallet.models.wallet_record import WalletRecord def is_anoncreds_profile_raise_web_exception(profile: Profile) -> None: @@ -29,3 +34,26 @@ def subwallet_type_not_same_as_base_wallet_raise_web_exception( raise web.HTTPForbidden( reason="Subwallet type must be the same as the base wallet type" ) + + +async def get_subwallet_profiles_from_storage(root_profile: Profile) -> list[Profile]: + """Get subwallet profiles from storage.""" + subwallet_profiles = [] + base_storage_search = root_profile.inject(BaseStorageSearch) + search_session = base_storage_search.search_records( + type_filter=WalletRecord.RECORD_TYPE, page_size=10 + ) + while search_session._done is False: + wallet_storage_records = await search_session.fetch() + for wallet_storage_record in wallet_storage_records: + wallet_record = WalletRecord.from_storage( + wallet_storage_record.id, + json.loads(wallet_storage_record.value), + ) + subwallet_profiles.append( + await MultitenantManager(root_profile).get_wallet_profile( + base_context=root_profile.context, + wallet_record=wallet_record, + ) + ) + return subwallet_profiles diff --git a/aries_cloudagent/vc/routes.py b/aries_cloudagent/vc/routes.py index 3cafdff542..47e7d1b8f6 100644 --- a/aries_cloudagent/vc/routes.py +++ b/aries_cloudagent/vc/routes.py @@ -3,30 +3,26 @@ from aiohttp import web from aiohttp_apispec import docs, request_schema, response_schema from marshmallow.exceptions import ValidationError -import uuid +from uuid_utils import uuid4 + +from ..admin.decorators.auth import tenant_authentication from ..admin.request_context import AdminRequestContext -from ..storage.error import StorageError, StorageNotFoundError, StorageDuplicateError -from ..wallet.error import WalletError -from ..wallet.base import BaseWallet from ..config.base import InjectionError from ..resolver.base import ResolverError +from ..storage.error import StorageDuplicateError, StorageError, StorageNotFoundError from ..storage.vc_holder.base import VCHolder +from ..wallet.base import BaseWallet +from ..wallet.error import WalletError +from .vc_ld.manager import VcLdpManager, VcLdpManagerError from .vc_ld.models import web_schemas -from .vc_ld.manager import VcLdpManager -from .vc_ld.manager import VcLdpManagerError -from .vc_ld.models.credential import ( - VerifiableCredential, -) - -from .vc_ld.models.presentation import ( - VerifiablePresentation, -) - +from .vc_ld.models.credential import VerifiableCredential from .vc_ld.models.options import LDProofVCOptions +from 
.vc_ld.models.presentation import VerifiablePresentation @docs(tags=["vc-api"], summary="List credentials") @response_schema(web_schemas.ListCredentialsResponse(), 200, description="") +@tenant_authentication async def list_credentials_route(request: web.BaseRequest): """Request handler for listing credentials.
@@ -46,6 +42,7 @@ async def list_credentials_route(request: web.BaseRequest): @docs(tags=["vc-api"], summary="Fetch credential by ID") @response_schema(web_schemas.FetchCredentialResponse(), 200, description="") +@tenant_authentication async def fetch_credential_route(request: web.BaseRequest): """Request handler for fetching a credential by ID.
@@ -66,6 +63,7 @@ async def fetch_credential_route(request: web.BaseRequest): @docs(tags=["vc-api"], summary="Issue a credential") @request_schema(web_schemas.IssueCredentialRequest()) @response_schema(web_schemas.IssueCredentialResponse(), 200, description="") +@tenant_authentication async def issue_credential_route(request: web.BaseRequest): """Request handler for issuing a credential.
@@ -107,6 +105,7 @@ async def issue_credential_route(request: web.BaseRequest): @docs(tags=["vc-api"], summary="Verify a credential") @request_schema(web_schemas.VerifyCredentialRequest()) @response_schema(web_schemas.VerifyCredentialResponse(), 200, description="") +@tenant_authentication async def verify_credential_route(request: web.BaseRequest): """Request handler for verifying a credential.
@@ -147,7 +146,7 @@ async def store_credential_route(request: web.BaseRequest): try: vc = body["verifiableCredential"] - cred_id = vc["id"] if "id" in vc else f"urn:uuid:{str(uuid.uuid4())}" + cred_id = vc["id"] if "id" in vc else f"urn:uuid:{str(uuid4())}" options = {} if "options" not in body else body["options"] vc = VerifiableCredential.deserialize(vc)
@@ -171,6 +170,7 @@ async def store_credential_route(request: web.BaseRequest): @docs(tags=["vc-api"], summary="Prove a presentation") @request_schema(web_schemas.ProvePresentationRequest()) @response_schema(web_schemas.ProvePresentationResponse(), 200, description="") +@tenant_authentication async def prove_presentation_route(request: web.BaseRequest): """Request handler for proving a presentation.
@@ -211,6 +211,7 @@ async def prove_presentation_route(request: web.BaseRequest): @docs(tags=["vc-api"], summary="Verify a Presentation") @request_schema(web_schemas.VerifyPresentationRequest()) @response_schema(web_schemas.VerifyPresentationResponse(), 200, description="") +@tenant_authentication async def verify_presentation_route(request: web.BaseRequest): """Request handler for verifying a presentation.
diff --git a/aries_cloudagent/vc/vc_ld/models/linked_data_proof.py b/aries_cloudagent/vc/vc_ld/models/linked_data_proof.py index 40e5a2b7db..6787e82be7 100644 --- a/aries_cloudagent/vc/vc_ld/models/linked_data_proof.py +++ b/aries_cloudagent/vc/vc_ld/models/linked_data_proof.py @@ -105,9 +105,6 @@ class Meta: domain = fields.Str( required=False, - # TODO the domain can be more than a Uri, provide a less restrictive validation - # https://www.w3.org/TR/vc-data-integrity/#defn-domain - validate=Uri(), metadata={ "description": ( "A string value specifying the restricted domain of the signature."
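The const_compare helper added in utils/general.py above wraps hmac.compare_digest, so comparing a presented secret against the expected one takes time independent of where the first mismatching character sits; that keeps checks such as admin API keys from leaking information through response timing. A usage sketch (the key strings are made-up values):

# Usage sketch for the new const_compare helper; key values are illustrative.
from aries_cloudagent.utils.general import const_compare

assert const_compare("s3cr3t", "s3cr3t") is True
assert const_compare("s3cr3t", "guess!") is False
assert const_compare(None, "s3cr3t") is False  # None-safe: returns False, no exception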
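Each vc-api handler above gains a @tenant_authentication decorator imported from admin/decorators/auth.py, which this diff does not show. A hypothetical sketch of the general shape such a decorator takes for aiohttp handlers; the header checks here are illustrative assumptions, not ACA-Py's actual logic:

# Hypothetical sketch only: a decorator in this style authenticates the request
# before delegating to the wrapped aiohttp handler. ACA-Py's real decorator
# lives in admin/decorators/auth.py and validates admin/tenant credentials.
import functools
from aiohttp import web

def tenant_authentication(handler):
    @functools.wraps(handler)
    async def wrapper(request: web.BaseRequest):
        # Illustrative check: reject requests that carry no credentials at all
        if "Authorization" not in request.headers and "x-api-key" not in request.headers:
            raise web.HTTPUnauthorized(reason="Missing credentials")
        return await handler(request)
    return wrapper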
diff --git a/aries_cloudagent/wallet/anoncreds_upgrade.py b/aries_cloudagent/wallet/anoncreds_upgrade.py new file mode 100644 index 0000000000..4e9f16e8bc --- /dev/null +++ b/aries_cloudagent/wallet/anoncreds_upgrade.py @@ -0,0 +1,719 @@ +"""Functions for upgrading records to anoncreds.""" + +import asyncio +import json +import logging +from typing import Any, Optional + +from anoncreds import ( + CredentialDefinition, + CredentialDefinitionPrivate, + KeyCorrectnessProof, + RevocationRegistryDefinitionPrivate, + Schema, +) +from aries_askar import AskarError +from indy_credx import LinkSecret + +from ..anoncreds.issuer import ( + CATEGORY_CRED_DEF, + CATEGORY_CRED_DEF_KEY_PROOF, + CATEGORY_CRED_DEF_PRIVATE, + CATEGORY_SCHEMA, +) +from ..anoncreds.models.anoncreds_cred_def import CredDef, CredDefState +from ..anoncreds.models.anoncreds_revocation import ( + RevList, + RevListState, + RevRegDef, + RevRegDefState, + RevRegDefValue, +) +from ..anoncreds.models.anoncreds_schema import SchemaState +from ..anoncreds.revocation import ( + CATEGORY_REV_LIST, + CATEGORY_REV_REG_DEF, + CATEGORY_REV_REG_DEF_PRIVATE, +) +from ..cache.base import BaseCache +from ..core.profile import Profile +from ..indy.credx.holder import CATEGORY_LINK_SECRET, IndyCredxHolder +from ..ledger.multiple_ledger.ledger_requests_executor import ( + GET_CRED_DEF, + GET_SCHEMA, + IndyLedgerRequestsExecutor, +) +from ..messaging.credential_definitions.util import CRED_DEF_SENT_RECORD_TYPE +from ..messaging.schemas.util import SCHEMA_SENT_RECORD_TYPE +from ..multitenant.base import BaseMultitenantManager +from ..revocation.models.issuer_cred_rev_record import IssuerCredRevRecord +from ..revocation.models.issuer_rev_reg_record import IssuerRevRegRecord +from ..storage.base import BaseStorage +from ..storage.error import StorageNotFoundError +from ..storage.record import StorageRecord +from ..storage.type import ( + RECORD_TYPE_ACAPY_STORAGE_TYPE, + RECORD_TYPE_ACAPY_UPGRADING, + STORAGE_TYPE_VALUE_ANONCREDS, +) +from .singletons import IsAnoncredsSingleton, UpgradeInProgressSingleton + +LOGGER = logging.getLogger(__name__) + +UPGRADING_RECORD_IN_PROGRESS = "anoncreds_in_progress" +UPGRADING_RECORD_FINISHED = "anoncreds_finished" + +# Number of times to retry upgrading records +max_retries = 5
+ + +class SchemaUpgradeObj: + """Schema upgrade object.""" + + def __init__( + self, + schema_id: str, + schema: Schema, + name: str, + version: str, + issuer_id: str, + old_record_id: str, + ): + """Initialize schema upgrade object.""" + self.schema_id = schema_id + self.schema = schema + self.name = name + self.version = version + self.issuer_id = issuer_id + self.old_record_id = old_record_id
+ + +class CredDefUpgradeObj: + """Cred def upgrade object.""" + + def __init__( + self, + cred_def_id: str, + cred_def: CredentialDefinition, + cred_def_private: CredentialDefinitionPrivate, + key_proof: KeyCorrectnessProof, + revocation: Optional[bool] = None, + askar_cred_def: Optional[Any] = None, + max_cred_num: Optional[int] = None, + ): + """Initialize cred def upgrade object.""" + self.cred_def_id = cred_def_id + self.cred_def = cred_def + self.cred_def_private = cred_def_private + self.key_proof = key_proof + self.revocation = revocation + self.askar_cred_def = askar_cred_def + self.max_cred_num = max_cred_num
+ + +class RevRegDefUpgradeObj: + """Rev reg def upgrade object.""" + + def __init__( + self, + rev_reg_def_id: str, + rev_reg_def: RevRegDef, + rev_reg_def_private: RevocationRegistryDefinitionPrivate, + active: bool = False, + ): + """Initialize rev reg def upgrade object.""" + self.rev_reg_def_id = rev_reg_def_id + self.rev_reg_def = rev_reg_def + self.rev_reg_def_private = rev_reg_def_private + self.active = active
+ + +class RevListUpgradeObj: + """Rev entry upgrade object.""" + + def __init__( + self, + rev_list: RevList, + pending: list, + rev_reg_def_id: str, + cred_rev_records: list, + ): + """Initialize rev entry upgrade object.""" + self.rev_list = rev_list + self.pending = pending + self.rev_reg_def_id = rev_reg_def_id + self.cred_rev_records = cred_rev_records
+ + +async def get_schema_upgrade_object( + profile: Profile, schema_id: str, askar_schema +) -> SchemaUpgradeObj: + """Get schema upgrade object.""" + + async with profile.session() as session: + schema_id = askar_schema.tags.get("schema_id") + issuer_did = askar_schema.tags.get("schema_issuer_did") + # Need to get schema from the ledger because the attribute names + # are not stored in the wallet + multitenant_mgr = session.inject_or(BaseMultitenantManager) + if multitenant_mgr: + ledger_exec_inst = IndyLedgerRequestsExecutor(profile) + else: + ledger_exec_inst = session.inject(IndyLedgerRequestsExecutor) + + _, ledger = await ledger_exec_inst.get_ledger_for_identifier( + schema_id, + txn_record_type=GET_SCHEMA, + ) + async with ledger: + schema_from_ledger = await ledger.get_schema(schema_id) + + return SchemaUpgradeObj( + schema_id, + Schema.create( + schema_id, + askar_schema.tags.get("schema_name"), + issuer_did, + schema_from_ledger["attrNames"], + ), + askar_schema.tags.get("schema_name"), + askar_schema.tags.get("schema_version"), + issuer_did, + askar_schema.id, + )
+ + +async def get_cred_def_upgrade_object( + profile: Profile, askar_cred_def +) -> CredDefUpgradeObj: + """Get cred def upgrade object.""" + cred_def_id = askar_cred_def.tags.get("cred_def_id") + async with profile.session() as session: + # Need to get the cred def from the ledger because the tag is not stored + # in the wallet, and we don't know whether it supports revocation + multitenant_mgr = session.inject_or(BaseMultitenantManager) + if multitenant_mgr: + ledger_exec_inst = IndyLedgerRequestsExecutor(profile) + else: + ledger_exec_inst = session.inject(IndyLedgerRequestsExecutor) + _, ledger = await ledger_exec_inst.get_ledger_for_identifier( + cred_def_id, + txn_record_type=GET_CRED_DEF, + ) + async with ledger: + cred_def_from_ledger = await ledger.get_credential_definition(cred_def_id) + + async with profile.session() as session: + storage = session.inject(BaseStorage) + askar_cred_def_private = await storage.get_record( + CATEGORY_CRED_DEF_PRIVATE, cred_def_id + ) + askar_cred_def_key_proof = await storage.get_record( + CATEGORY_CRED_DEF_KEY_PROOF, cred_def_id + ) + + cred_def = CredDef( + issuer_id=askar_cred_def.tags.get("issuer_did"), + schema_id=askar_cred_def.tags.get("schema_id"), + tag=cred_def_from_ledger["tag"], + type=cred_def_from_ledger["type"], + value=cred_def_from_ledger["value"], + ) + + return CredDefUpgradeObj( + cred_def_id, + cred_def, + askar_cred_def_private.value, + askar_cred_def_key_proof.value, + cred_def_from_ledger["value"].get("revocation", None), + askar_cred_def=askar_cred_def, + )
+ + +async def get_rev_reg_def_upgrade_object( + profile: Profile, + cred_def_upgrade_obj: CredDefUpgradeObj, + askar_issuer_rev_reg_def, + is_active: bool, +) -> RevRegDefUpgradeObj: + """Get rev reg def upgrade object.""" + rev_reg_def_id = askar_issuer_rev_reg_def.tags.get("revoc_reg_id") + + async with profile.session() as session: + storage = 
session.inject(BaseStorage) + askar_reg_rev_def_private = await storage.get_record( + CATEGORY_REV_REG_DEF_PRIVATE, rev_reg_def_id + ) + + revoc_reg_def_values = json.loads(askar_issuer_rev_reg_def.value) + + reg_def_value = RevRegDefValue( + revoc_reg_def_values["revoc_reg_def"]["value"]["publicKeys"], + revoc_reg_def_values["revoc_reg_def"]["value"]["maxCredNum"], + revoc_reg_def_values["revoc_reg_def"]["value"]["tailsLocation"], + revoc_reg_def_values["revoc_reg_def"]["value"]["tailsHash"], + ) + + rev_reg_def = RevRegDef( + issuer_id=askar_issuer_rev_reg_def.tags.get("issuer_did"), + cred_def_id=cred_def_upgrade_obj.cred_def_id, + tag=revoc_reg_def_values["tag"], + type=revoc_reg_def_values["revoc_def_type"], + value=reg_def_value, + ) + + return RevRegDefUpgradeObj( + rev_reg_def_id, rev_reg_def, askar_reg_rev_def_private.value, is_active + ) + + +async def get_rev_list_upgrade_object( + profile: Profile, rev_reg_def_upgrade_obj: RevRegDefUpgradeObj +) -> RevListUpgradeObj: + """Get revocation entry upgrade object.""" + rev_reg = rev_reg_def_upgrade_obj.rev_reg_def + async with profile.session() as session: + storage = session.inject(BaseStorage) + askar_cred_rev_records = await storage.find_all_records( + IssuerCredRevRecord.RECORD_TYPE, + {"rev_reg_id": rev_reg_def_upgrade_obj.rev_reg_def_id}, + ) + + revocation_list = [0] * rev_reg.value.max_cred_num + for askar_cred_rev_record in askar_cred_rev_records: + if askar_cred_rev_record.tags.get("state") == "revoked": + revocation_list[int(askar_cred_rev_record.tags.get("cred_rev_id")) - 1] = 1 + + rev_list = RevList( + issuer_id=rev_reg.issuer_id, + rev_reg_def_id=rev_reg_def_upgrade_obj.rev_reg_def_id, + revocation_list=revocation_list, + current_accumulator=json.loads( + rev_reg_def_upgrade_obj.askar_issuer_rev_reg_def.value + )["revoc_reg_entry"]["value"]["accum"], + ) + + return RevListUpgradeObj( + rev_list, + json.loads(rev_reg_def_upgrade_obj.askar_issuer_rev_reg_def.value)[ + "pending_pub" + ], + rev_reg_def_upgrade_obj.rev_reg_def_id, + askar_cred_rev_records, + ) + + +async def upgrade_and_delete_schema_records( + txn, schema_upgrade_obj: SchemaUpgradeObj +) -> None: + """Upgrade and delete schema records.""" + schema_anoncreds = schema_upgrade_obj.schema + await txn.handle.remove("schema_sent", schema_upgrade_obj.old_record_id) + await txn.handle.replace( + CATEGORY_SCHEMA, + schema_upgrade_obj.schema_id, + schema_anoncreds.to_json(), + { + "name": schema_upgrade_obj.name, + "version": schema_upgrade_obj.version, + "issuer_id": schema_upgrade_obj.issuer_id, + "state": SchemaState.STATE_FINISHED, + }, + ) + + +async def upgrade_and_delete_cred_def_records( + txn, anoncreds_schema, cred_def_upgrade_obj: CredDefUpgradeObj +) -> None: + """Upgrade and delete cred def records.""" + cred_def_id = cred_def_upgrade_obj.cred_def_id + anoncreds_schema = anoncreds_schema.to_dict() + askar_cred_def = cred_def_upgrade_obj.askar_cred_def + await txn.handle.remove("cred_def_sent", askar_cred_def.id) + await txn.handle.replace( + CATEGORY_CRED_DEF, + cred_def_id, + cred_def_upgrade_obj.cred_def.to_json(), + tags={ + "schema_id": askar_cred_def.tags.get("schema_id"), + "schema_issuer_id": anoncreds_schema["issuerId"], + "issuer_id": askar_cred_def.tags.get("issuer_did"), + "schema_name": anoncreds_schema["name"], + "schema_version": anoncreds_schema["version"], + "state": CredDefState.STATE_FINISHED, + "epoch": askar_cred_def.tags.get("epoch"), + # TODO We need to keep track of these but tags probably + # isn't ideal. 
This suggests that a full record object + # is necessary for non-private values + "support_revocation": json.dumps(cred_def_upgrade_obj.revocation), + "max_cred_num": str(cred_def_upgrade_obj.max_cred_num or 0), + }, + ) + await txn.handle.replace( + CATEGORY_CRED_DEF_PRIVATE, + cred_def_id, + CredentialDefinitionPrivate.load( + cred_def_upgrade_obj.cred_def_private + ).to_json_buffer(), + ) + await txn.handle.replace( + CATEGORY_CRED_DEF_KEY_PROOF, + cred_def_id, + KeyCorrectnessProof.load(cred_def_upgrade_obj.key_proof).to_json_buffer(), + )
+ + +rev_reg_states_mapping = { + "init": RevRegDefState.STATE_WAIT, + "generated": RevRegDefState.STATE_ACTION, + "posted": RevRegDefState.STATE_FINISHED, + "active": RevRegDefState.STATE_FINISHED, + "full": RevRegDefState.STATE_FULL, + "decommissioned": RevRegDefState.STATE_DECOMMISSIONED, +}
+ + +async def upgrade_and_delete_rev_reg_def_records( + txn, rev_reg_def_upgrade_obj: RevRegDefUpgradeObj +) -> None: + """Upgrade and delete rev reg def records.""" + rev_reg_def_id = rev_reg_def_upgrade_obj.rev_reg_def_id + askar_issuer_rev_reg_def = rev_reg_def_upgrade_obj.askar_issuer_rev_reg_def + await txn.handle.remove(IssuerRevRegRecord.RECORD_TYPE, askar_issuer_rev_reg_def.id) + await txn.handle.replace( + CATEGORY_REV_REG_DEF, + rev_reg_def_id, + rev_reg_def_upgrade_obj.rev_reg_def.to_json(), + tags={ + "cred_def_id": rev_reg_def_upgrade_obj.rev_reg_def.cred_def_id, + "issuer_id": askar_issuer_rev_reg_def.tags.get("issuer_did"), + "state": rev_reg_states_mapping[askar_issuer_rev_reg_def.tags.get("state")], + "active": json.dumps(rev_reg_def_upgrade_obj.active), + }, + ) + await txn.handle.replace( + CATEGORY_REV_REG_DEF_PRIVATE, + rev_reg_def_id, + RevocationRegistryDefinitionPrivate.load( + rev_reg_def_upgrade_obj.rev_reg_def_private + ).to_json_buffer(), + )
+ + +async def upgrade_and_delete_rev_entry_records( + txn, rev_list_upgrade_obj: RevListUpgradeObj +) -> None: + """Upgrade and delete revocation entry records.""" + next_index = 0 + for cred_rev_record in rev_list_upgrade_obj.cred_rev_records: + if int(cred_rev_record.tags.get("cred_rev_id")) > next_index: + next_index = int(cred_rev_record.tags.get("cred_rev_id")) + await txn.handle.remove(IssuerCredRevRecord.RECORD_TYPE, cred_rev_record.id) + + await txn.handle.insert( + CATEGORY_REV_LIST, + rev_list_upgrade_obj.rev_reg_def_id, + value_json={ + "rev_list": rev_list_upgrade_obj.rev_list.serialize(), + "pending": rev_list_upgrade_obj.pending, + "next_index": next_index + 1, + }, + tags={ + "state": RevListState.STATE_FINISHED, + "pending": json.dumps(rev_list_upgrade_obj.pending is not None), + }, + )
+ + +async def upgrade_all_records_with_transaction( + txn: Any, + schema_upgrade_objs: list[SchemaUpgradeObj], + cred_def_upgrade_objs: list[CredDefUpgradeObj], + rev_reg_def_upgrade_objs: list[RevRegDefUpgradeObj], + rev_list_upgrade_objs: list[RevListUpgradeObj], + link_secret: Optional[LinkSecret] = None, +) -> None: + """Upgrade all objects with transaction.""" + for schema_upgrade_obj in schema_upgrade_objs: + await upgrade_and_delete_schema_records(txn, schema_upgrade_obj) + for cred_def_upgrade_obj in cred_def_upgrade_objs: + await upgrade_and_delete_cred_def_records( + txn, schema_upgrade_obj.schema, cred_def_upgrade_obj + ) + for rev_reg_def_upgrade_obj in rev_reg_def_upgrade_objs: + await upgrade_and_delete_rev_reg_def_records(txn, rev_reg_def_upgrade_obj) + for rev_list_upgrade_obj in rev_list_upgrade_objs: + await upgrade_and_delete_rev_entry_records(txn, rev_list_upgrade_obj) + + if link_secret: + await txn.handle.replace( + CATEGORY_LINK_SECRET, + IndyCredxHolder.LINK_SECRET_ID, + link_secret.to_dict()["value"]["ms"].encode("ascii"), + ) + + await txn.commit()
+ + +async def get_rev_reg_def_upgrade_objs( + profile: Profile, + cred_def_upgrade_obj: CredDefUpgradeObj, + rev_list_upgrade_objs: list[RevListUpgradeObj], +) -> list[RevRegDefUpgradeObj]: + """Get rev reg def upgrade objects.""" + + rev_reg_def_upgrade_objs = [] + async with profile.session() as session: + storage = session.inject(BaseStorage) + # Must be sorted to find the active rev reg def + askar_issuer_rev_reg_def_records = sorted( + await storage.find_all_records( + IssuerRevRegRecord.RECORD_TYPE, + {"cred_def_id": cred_def_upgrade_obj.cred_def_id}, + ), + key=lambda x: json.loads(x.value)["created_at"], + ) + found_active = False + is_active = False + for askar_issuer_rev_reg_def in askar_issuer_rev_reg_def_records: + # active rev reg def is the oldest non-full and active rev reg def + if ( + not found_active + and askar_issuer_rev_reg_def.tags.get("state") != "full" + and askar_issuer_rev_reg_def.tags.get("state") == "active" + ): + found_active = True + is_active = True + + rev_reg_def_upgrade_obj = await get_rev_reg_def_upgrade_object( + profile, + cred_def_upgrade_obj, + askar_issuer_rev_reg_def, + is_active, + ) + is_active = False + rev_reg_def_upgrade_obj.askar_issuer_rev_reg_def = askar_issuer_rev_reg_def + + rev_reg_def_upgrade_objs.append(rev_reg_def_upgrade_obj) + + # add the revocation list upgrade object from reg def upgrade object + rev_list_upgrade_objs.append( + await get_rev_list_upgrade_object(profile, rev_reg_def_upgrade_obj) + ) + return rev_reg_def_upgrade_objs
+ + +async def convert_records_to_anoncreds(profile) -> None: + """Convert and delete old askar records.""" + async with profile.session() as session: + storage = session.inject(BaseStorage) + askar_schema_records = await storage.find_all_records(SCHEMA_SENT_RECORD_TYPE) + + schema_upgrade_objs = [] + cred_def_upgrade_objs = [] + rev_reg_def_upgrade_objs = [] + rev_list_upgrade_objs = [] + + # Schemas + for askar_schema in askar_schema_records: + schema_upgrade_objs.append( + await get_schema_upgrade_object(profile, askar_schema.id, askar_schema) + ) + + # CredDefs and Revocation Objects + askar_cred_def_records = await storage.find_all_records( + CRED_DEF_SENT_RECORD_TYPE, {} + ) + for askar_cred_def in askar_cred_def_records: + cred_def_upgrade_obj = await get_cred_def_upgrade_object( + profile, askar_cred_def + ) + rev_reg_def_upgrade_objs = await get_rev_reg_def_upgrade_objs( + profile, cred_def_upgrade_obj, rev_list_upgrade_objs + ) + # update the cred_def with the max_cred_num from first rev_reg_def + if rev_reg_def_upgrade_objs: + cred_def_upgrade_obj.max_cred_num = rev_reg_def_upgrade_objs[ + 0 + ].rev_reg_def.value.max_cred_num + cred_def_upgrade_objs.append(cred_def_upgrade_obj) + + # Link secret + link_secret_record = None + try: + link_secret_record = await session.handle.fetch( + CATEGORY_LINK_SECRET, IndyCredxHolder.LINK_SECRET_ID + ) + except AskarError: + pass + + link_secret = None + if link_secret_record: + link_secret = LinkSecret.load(link_secret_record.raw_value) + + async with profile.transaction() as txn: + try: + await upgrade_all_records_with_transaction( + txn, + schema_upgrade_objs, + cred_def_upgrade_objs, + rev_reg_def_upgrade_objs, + rev_list_upgrade_objs, + link_secret, + ) + except Exception as e: + await txn.rollback() + raise e
+ + +async def retry_converting_records( + profile: Profile, upgrading_record: 
StorageRecord, retry: int, is_subwallet=False +) -> None: + """Retry converting records to anoncreds.""" + + async def fail_upgrade(): + async with profile.session() as session: + storage = session.inject(BaseStorage) + await storage.delete_record(upgrading_record) + + try: + await convert_records_to_anoncreds(profile) + await finish_upgrade_by_updating_profile_or_shutting_down(profile, is_subwallet) + LOGGER.info(f"Upgrade complete via retry for wallet: {profile.name}") + except Exception as e: + LOGGER.error(f"Error when upgrading records for wallet {profile.name} : {e} ") + if retry < max_retries: + LOGGER.info(f"Retry attempt {retry + 1} to upgrade wallet {profile.name}") + await asyncio.sleep(1) + await retry_converting_records( + profile, upgrading_record, retry + 1, is_subwallet + ) + else: + LOGGER.error( + f"""Failed to upgrade wallet: {profile.name} after 5 retries. + Try fixing any connection issues and re-running the update""" + ) + await fail_upgrade() + + +async def upgrade_wallet_to_anoncreds_if_requested( + profile: Profile, is_subwallet=False +) -> None: + """Get upgrading record and attempt to upgrade wallet to anoncreds.""" + async with profile.session() as session: + storage = session.inject(BaseStorage) + try: + upgrading_record = await storage.find_record( + RECORD_TYPE_ACAPY_UPGRADING, {} + ) + if upgrading_record.value == UPGRADING_RECORD_FINISHED: + IsAnoncredsSingleton().set_wallet(profile.name) + return + except StorageNotFoundError: + return + + try: + LOGGER.info("Upgrade in process for wallet: %s", profile.name) + await convert_records_to_anoncreds(profile) + await finish_upgrade_by_updating_profile_or_shutting_down( + profile, is_subwallet + ) + except Exception as e: + LOGGER.error(f"Error when upgrading wallet {profile.name} : {e} ") + await retry_converting_records(profile, upgrading_record, 0, is_subwallet) + + +async def finish_upgrade(profile: Profile): + """Finish record by setting records and caches.""" + async with profile.session() as session: + storage = session.inject(BaseStorage) + try: + storage_type_record = await storage.find_record( + type_filter=RECORD_TYPE_ACAPY_STORAGE_TYPE, tag_query={} + ) + await storage.update_record( + storage_type_record, STORAGE_TYPE_VALUE_ANONCREDS, {} + ) + # This should only happen for subwallets + except StorageNotFoundError: + await storage.add_record( + StorageRecord( + RECORD_TYPE_ACAPY_STORAGE_TYPE, + STORAGE_TYPE_VALUE_ANONCREDS, + ) + ) + await finish_upgrading_record(profile) + IsAnoncredsSingleton().set_wallet(profile.name) + UpgradeInProgressSingleton().remove_wallet(profile.name) + + +async def finish_upgrading_record(profile: Profile): + """Update upgrading record to finished.""" + async with profile.session() as session: + storage = session.inject(BaseStorage) + try: + upgrading_record = await storage.find_record( + RECORD_TYPE_ACAPY_UPGRADING, tag_query={} + ) + await storage.update_record(upgrading_record, UPGRADING_RECORD_FINISHED, {}) + except StorageNotFoundError: + return + + +async def upgrade_subwallet(profile: Profile) -> None: + """Upgrade subwallet to anoncreds.""" + async with profile.session() as session: + multitenant_mgr = session.inject_or(BaseMultitenantManager) + wallet_id = profile.settings.get("wallet.id") + cache = profile.inject_or(BaseCache) + await cache.flush() + settings = {"wallet.type": STORAGE_TYPE_VALUE_ANONCREDS} + await multitenant_mgr.update_wallet(wallet_id, settings) + + +async def finish_upgrade_by_updating_profile_or_shutting_down( + profile: Profile, 
is_subwallet=False +): + """Upgrade wallet to anoncreds and set storage type.""" + if is_subwallet: + await upgrade_subwallet(profile) + await finish_upgrade(profile) + LOGGER.info( + f"""Upgrade of subwallet {profile.settings.get('wallet.name')} has completed. Profile is now askar-anoncreds""" # noqa: E501 + ) + else: + await finish_upgrade(profile) + LOGGER.info( + f"Upgrade of base wallet {profile.settings.get('wallet.name')} to anoncreds has completed. Shutting down agent." # noqa: E501 + ) + asyncio.get_event_loop().stop() + + +async def check_upgrade_completion_loop(profile: Profile, is_subwallet=False): + """Check if upgrading is complete.""" + async with profile.session() as session: + while True: + storage = session.inject(BaseStorage) + LOGGER.debug(f"Checking upgrade completion for wallet: {profile.name}") + try: + upgrading_record = await storage.find_record( + RECORD_TYPE_ACAPY_UPGRADING, tag_query={} + ) + if upgrading_record.value == UPGRADING_RECORD_FINISHED: + IsAnoncredsSingleton().set_wallet(profile.name) + UpgradeInProgressSingleton().remove_wallet(profile.name) + if is_subwallet: + await upgrade_subwallet(profile) + LOGGER.info( + f"""Upgrade of subwallet {profile.settings.get('wallet.name')} has completed. Profile is now askar-anoncreds""" # noqa: E501 + ) + return + LOGGER.info( + f"Upgrade complete for wallet: {profile.name}, shutting down agent." # noqa: E501 + ) + # Shut down agent if base wallet + asyncio.get_event_loop().stop() + except StorageNotFoundError: + # If the record is not found, the upgrade failed + return + + await asyncio.sleep(1) diff --git a/aries_cloudagent/wallet/indy.py b/aries_cloudagent/wallet/indy.py deleted file mode 100644 index e8d33b6652..0000000000 --- a/aries_cloudagent/wallet/indy.py +++ /dev/null @@ -1,953 +0,0 @@ -"""Indy implementation of BaseWallet interface.""" - -import json -import logging - -from typing import List, Optional, Sequence, Tuple, Union - -import indy.anoncreds -import indy.did -import indy.crypto -import indy.wallet - -from indy.error import IndyError, ErrorCode - -from ..did.did_key import DIDKey -from ..indy.sdk.error import IndyErrorHandler -from ..indy.sdk.wallet_setup import IndyOpenWallet -from ..ledger.base import BaseLedger -from ..ledger.endpoint_type import EndpointType -from ..ledger.error import LedgerConfigError -from ..storage.indy import IndySdkStorage -from ..storage.error import StorageDuplicateError, StorageNotFoundError -from ..storage.record import StorageRecord - -from .base import BaseWallet -from .crypto import ( - create_keypair, - sign_message, - validate_seed, - verify_signed_message, -) -from .did_info import DIDInfo, KeyInfo -from .did_method import SOV, KEY, DIDMethod -from .error import WalletError, WalletDuplicateError, WalletNotFoundError -from .key_pair import KeyPairStorageManager -from .key_type import BLS12381G2, ED25519, KeyType, KeyTypes -from .util import b58_to_bytes, bytes_to_b58, bytes_to_b64 - - -LOGGER = logging.getLogger(__name__) - -RECORD_TYPE_CONFIG = "config" -RECORD_NAME_PUBLIC_DID = "default_public_did" - - -class IndySdkWallet(BaseWallet): - """Indy identity wallet implementation.""" - - def __init__(self, opened: IndyOpenWallet): - """Create a new IndySdkWallet instance.""" - self.opened: IndyOpenWallet = opened - - def __did_info_from_indy_info(self, info): - metadata = json.loads(info["metadata"]) if info["metadata"] else {} - did: str = info["did"] - verkey = info["verkey"] - - method = KEY if did.startswith("did:key") else SOV - key_type = ED25519 - - if 
method == KEY: - did = DIDKey.from_public_key_b58(info["verkey"], key_type).did - - return DIDInfo( - did=did, verkey=verkey, metadata=metadata, method=method, key_type=key_type - ) - - def __did_info_from_key_pair_info(self, info: dict): - metadata = info["metadata"] - verkey = info["verkey"] - - # TODO: inject context to support did method registry - method = SOV if metadata.get("method", "key") == SOV.method_name else KEY - # TODO: inject context to support keytype registry - key_types = KeyTypes() - key_type = key_types.from_key_type(info["key_type"]) - - if method == KEY: - did = DIDKey.from_public_key_b58(info["verkey"], key_type).did - - return DIDInfo( - did=did, verkey=verkey, metadata=metadata, method=method, key_type=key_type - ) - - async def __create_indy_signing_key( - self, key_type: KeyType, metadata: dict, seed: str = None - ) -> str: - if key_type != ED25519: - raise WalletError(f"Unsupported key type: {key_type.key_type}") - - args = {} - if seed: - args["seed"] = bytes_to_b64(validate_seed(seed)) - try: - verkey = await indy.crypto.create_key(self.opened.handle, json.dumps(args)) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.WalletItemAlreadyExists: - raise WalletDuplicateError("Verification key already present in wallet") - raise IndyErrorHandler.wrap_error( - x_indy, "Wallet {} error".format(self.opened.name), WalletError - ) from x_indy - - await indy.crypto.set_key_metadata( - self.opened.handle, verkey, json.dumps(metadata) - ) - - return verkey - - async def __create_keypair_signing_key( - self, key_type: KeyType, metadata: dict, seed: str = None - ) -> str: - if key_type != BLS12381G2: - raise WalletError(f"Unsupported key type: {key_type.key_type}") - - public_key, secret_key = create_keypair(key_type, validate_seed(seed)) - verkey = bytes_to_b58(public_key) - key_pair_mgr = KeyPairStorageManager(IndySdkStorage(self.opened)) - - # Check if key already exists - try: - key_info = await self.__get_keypair_signing_key(verkey) - if key_info: - raise WalletDuplicateError("Verification key already present in wallet") - except WalletNotFoundError: - # If we can't find the key, it means it doesn't exist already - # this is good - pass - - await key_pair_mgr.store_key_pair( - public_key=public_key, - secret_key=secret_key, - key_type=key_type, - metadata=metadata, - ) - - return verkey - - async def create_signing_key( - self, - key_type: KeyType, - seed: Optional[str] = None, - metadata: Optional[dict] = None, - ) -> KeyInfo: - """Create a new public/private signing keypair. - - Args: - seed: Seed for key - metadata: Optional metadata to store with the keypair - - Returns: - A `KeyInfo` representing the new record - - Raises: - WalletDuplicateError: If the resulting verkey already exists in the wallet - WalletError: If there is a libindy error - - """ - return await self.create_key(key_type, seed, metadata) - - async def create_key( - self, - key_type: KeyType, - seed: Optional[str] = None, - metadata: Optional[dict] = None, - ) -> KeyInfo: - """Create a new public/private keypair. 
- - Args: - key_type: Key type to create - seed: Seed for key - metadata: Optional metadata to store with the keypair - - Returns: - A `KeyInfo` representing the new record - - Raises: - WalletDuplicateError: If the resulting verkey already exists in the wallet - WalletError: If there is another backend error - """ - - # must save metadata to allow identity check - # otherwise get_key_metadata just returns WalletItemNotFound - if metadata is None: - metadata = {} - - # All ed25519 keys are handled by indy - if key_type == ED25519: - verkey = await self.__create_indy_signing_key(key_type, metadata, seed) - # All other (only bls12381g2 atm) are handled outside of indy - else: - verkey = await self.__create_keypair_signing_key(key_type, metadata, seed) - - return KeyInfo(verkey=verkey, metadata=metadata, key_type=key_type) - - async def __get_indy_signing_key(self, verkey: str) -> KeyInfo: - try: - metadata = await indy.crypto.get_key_metadata(self.opened.handle, verkey) - - return KeyInfo( - verkey=verkey, - metadata=json.loads(metadata) if metadata else {}, - key_type=ED25519, - ) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.WalletItemNotFound: - raise WalletNotFoundError(f"Unknown key: {verkey}") - # # If we resolve a key that is not 32 bytes we get CommonInvalidStructure - # elif x_indy.error_code == ErrorCode.CommonInvalidStructure: - # raise WalletNotFoundError(f"Unknown key: {verkey}") - else: - raise IndyErrorHandler.wrap_error( - x_indy, "Wallet {} error".format(self.opened.name), WalletError - ) from x_indy - - async def __get_keypair_signing_key(self, verkey: str) -> KeyInfo: - try: - key_pair_mgr = KeyPairStorageManager(IndySdkStorage(self.opened)) - key_pair = await key_pair_mgr.get_key_pair(verkey) - # TODO: inject context to support more keytypes - key_types = KeyTypes() - return KeyInfo( - verkey=verkey, - metadata=key_pair["metadata"], - key_type=key_types.from_key_type(key_pair["key_type"]) or BLS12381G2, - ) - except StorageNotFoundError: - raise WalletNotFoundError(f"Unknown key: {verkey}") - except StorageDuplicateError: - raise WalletDuplicateError(f"Multiple keys exist for verkey: {verkey}") - - async def get_signing_key(self, verkey: str) -> KeyInfo: - """Fetch info for a signing keypair. - - Args: - verkey: The verification key of the keypair - - Returns: - A `KeyInfo` representing the keypair - - Raises: - WalletNotFoundError: If no keypair is associated with the verification key - WalletError: If there is a libindy error - - """ - if not verkey: - raise WalletError("Missing required input parameter: verkey") - - # Only try to load indy signing key if the verkey is 32 bytes - # this may change if indy is going to support verkeys of different byte length - if len(b58_to_bytes(verkey)) == 32: - try: - return await self.__get_indy_signing_key(verkey) - except WalletNotFoundError: - return await self.__get_keypair_signing_key(verkey) - else: - return await self.__get_keypair_signing_key(verkey) - - async def replace_signing_key_metadata(self, verkey: str, metadata: dict): - """Replace the metadata associated with a signing keypair. 
- - Args: - verkey: The verification key of the keypair - metadata: The new metadata to store - - Raises: - WalletNotFoundError: if no keypair is associated with the verification key - - """ - metadata = metadata or {} - - # throw exception if key is undefined - key_info = await self.get_signing_key(verkey) - - # All ed25519 keys are handled by indy - if key_info.key_type == ED25519: - await indy.crypto.set_key_metadata( - self.opened.handle, verkey, json.dumps(metadata) - ) - # All other (only bls12381g2 atm) are handled outside of indy - else: - key_pair_mgr = KeyPairStorageManager(IndySdkStorage(self.opened)) - await key_pair_mgr.update_key_pair_metadata( - verkey=key_info.verkey, metadata=metadata - ) - - async def rotate_did_keypair_start(self, did: str, next_seed: str = None) -> str: - """Begin key rotation for DID that wallet owns: generate new keypair. - - Args: - did: signing DID - next_seed: incoming replacement seed (default random) - - Returns: - The new verification key - - """ - # Check if DID can rotate keys - # TODO: inject context for did method registry support - method_name = did.split(":")[1] if did.startswith("did:") else SOV.method_name - did_method = SOV if method_name == SOV.method_name else KEY - if not did_method.supports_rotation: - raise WalletError( - f"DID method '{did_method.method_name}' does not support key rotation." - ) - - try: - verkey = await indy.did.replace_keys_start( - self.opened.handle, - did, - json.dumps( - {"seed": bytes_to_b64(validate_seed(next_seed))} - if next_seed - else {} - ), - ) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.WalletItemNotFound: - raise WalletNotFoundError("Wallet owns no such DID: {}".format(did)) - raise IndyErrorHandler.wrap_error( - x_indy, "Wallet {} error".format(self.opened.name), WalletError - ) from x_indy - - return verkey - - async def rotate_did_keypair_apply(self, did: str) -> DIDInfo: - """Apply temporary keypair as main for DID that wallet owns. - - Args: - did: signing DID - - Returns: - DIDInfo with new verification key and metadata for DID - - """ - try: - await indy.did.replace_keys_apply(self.opened.handle, did) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.WalletItemNotFound: - raise WalletNotFoundError("Wallet owns no such DID: {}".format(did)) - raise IndyErrorHandler.wrap_error( - x_indy, "Wallet {} error".format(self.opened.name), WalletError - ) from x_indy - - async def __create_indy_local_did( - self, - method: DIDMethod, - key_type: KeyType, - metadata: dict = None, - seed: str = None, - *, - did: str = None, - ) -> DIDInfo: - if method not in [SOV, KEY]: - raise WalletError( - f"Unsupported DID method for indy storage: {method.method_name}" - ) - if key_type != ED25519: - raise WalletError( - f"Unsupported key type for indy storage: {key_type.key_type}" - ) - - cfg = {} - if seed: - cfg["seed"] = bytes_to_b64(validate_seed(seed)) - if did: - cfg["did"] = did - # Create fully qualified did. 
This helps with determining the - # did method when retrieving - if method != SOV: - cfg["method_name"] = method.method_name - did_json = json.dumps(cfg) - # crypto_type, cid - optional parameters skipped - try: - did, verkey = await indy.did.create_and_store_my_did( - self.opened.handle, did_json - ) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.DidAlreadyExistsError: - raise WalletDuplicateError("DID already present in wallet") - raise IndyErrorHandler.wrap_error( - x_indy, "Wallet {} error".format(self.opened.name), WalletError - ) from x_indy - - # did key uses different format - if method == KEY: - did = DIDKey.from_public_key_b58(verkey, key_type).did - - await self.replace_local_did_metadata(did, metadata or {}) - - return DIDInfo( - did=did, - verkey=verkey, - metadata=metadata or {}, - method=method, - key_type=key_type, - ) - - async def __create_keypair_local_did( - self, - method: DIDMethod, - key_type: KeyType, - metadata: dict = None, - seed: str = None, - ) -> DIDInfo: - if method != KEY: - raise WalletError( - f"Unsupported DID method for keypair storage: {method.method_name}" - ) - if key_type != BLS12381G2: - raise WalletError( - f"Unsupported key type for keypair storage: {key_type.key_type}" - ) - - public_key, secret_key = create_keypair(key_type, validate_seed(seed)) - key_pair_mgr = KeyPairStorageManager(IndySdkStorage(self.opened)) - # should change if other did methods are supported - did_key = DIDKey.from_public_key(public_key, key_type) - - if not metadata: - metadata = {} - metadata["method"] = method.method_name - - await key_pair_mgr.store_key_pair( - public_key=public_key, - secret_key=secret_key, - key_type=key_type, - metadata=metadata, - tags={"method": method.method_name}, - ) - - return DIDInfo( - did=did_key.did, - verkey=did_key.public_key_b58, - metadata=metadata, - method=method, - key_type=key_type, - ) - - async def create_local_did( - self, - method: DIDMethod, - key_type: KeyType, - seed: Optional[str] = None, - did: Optional[str] = None, - metadata: Optional[dict] = None, - ) -> DIDInfo: - """Create and store a new local DID. - - Args: - method: The method to use for the DID - key_type: The key type to use for the DID - seed: Optional seed to use for DID - did: The DID to use - metadata: Metadata to store with DID - - Returns: - A `DIDInfo` instance representing the created DID - - Raises: - WalletDuplicateError: If the DID already exists in the wallet - WalletError: If there is a libindy error - - """ - - # validate key_type - if not method.supports_key_type(key_type): - raise WalletError( - f"Invalid key type {key_type.key_type}" - f" for DID method {method.method_name}" - ) - - if method == KEY and did: - raise WalletError("Not allowed to set DID for DID method 'key'") - - # All ed25519 keys are handled by indy - if key_type == ED25519: - return await self.__create_indy_local_did( - method, key_type, metadata, seed, did=did - ) - # All other (only bls12381g2 atm) are handled outside of indy - else: - return await self.__create_keypair_local_did( - method, key_type, metadata, seed - ) - - async def store_did(self, did_info: DIDInfo) -> DIDInfo: - """Store a DID in the wallet. - - This enables components external to the wallet to define how a DID - is created and then store it in the wallet for later use. 
- - Args: - did_info: The DID to store - - Returns: - The stored `DIDInfo` - """ - raise WalletError("This operation is not supported by Indy-SDK wallets") - - async def get_local_dids(self) -> Sequence[DIDInfo]: - """Get list of defined local DIDs. - - Returns: - A list of locally stored DIDs as `DIDInfo` instances - - """ - # retrieve indy dids - info_json = await indy.did.list_my_dids_with_meta(self.opened.handle) - info = json.loads(info_json) - ret = [] - for did in info: - ret.append(self.__did_info_from_indy_info(did)) - - # retrieve key pairs with method set to key - # this needs to change if more did methods are added - key_pair_mgr = KeyPairStorageManager(IndySdkStorage(self.opened)) - key_pairs = await key_pair_mgr.find_key_pairs( - tag_query={"method": KEY.method_name} - ) - for key_pair in key_pairs: - ret.append(self.__did_info_from_key_pair_info(key_pair)) - - return ret - - async def __get_indy_local_did( - self, method: DIDMethod, key_type: KeyType, did: str - ) -> DIDInfo: - if method not in [SOV, KEY]: - raise WalletError( - f"Unsupported DID method for indy storage: {method.method_name}" - ) - if key_type != ED25519: - raise WalletError( - f"Unsupported DID type for indy storage: {key_type.key_type}" - ) - - # key type is always ed25519, method not always key - if method == KEY and key_type == ED25519: - did_key = DIDKey.from_did(did) - - # Ed25519 did:keys are masked indy dids so transform to indy - # did with did:key prefix. - did = "did:key:" + bytes_to_b58(did_key.public_key[:16]) - try: - info_json = await indy.did.get_my_did_with_meta(self.opened.handle, did) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.WalletItemNotFound: - raise WalletNotFoundError("Unknown DID: {}".format(did)) - raise IndyErrorHandler.wrap_error( - x_indy, "Wallet {} error".format(self.opened.name), WalletError - ) from x_indy - info = json.loads(info_json) - return self.__did_info_from_indy_info(info) - - async def __get_keypair_local_did( - self, method: DIDMethod, key_type: KeyType, did: str - ): - if method != KEY: - raise WalletError( - f"Unsupported DID method for keypair storage: {method.method_name}" - ) - if key_type != BLS12381G2: - raise WalletError( - f"Unsupported DID type for keypair storage: {key_type.key_type}" - ) - - # method is always did:key - did_key = DIDKey.from_did(did) - - key_pair_mgr = KeyPairStorageManager(IndySdkStorage(self.opened)) - key_pair = await key_pair_mgr.get_key_pair(verkey=did_key.public_key_b58) - return self.__did_info_from_key_pair_info(key_pair) - - async def get_local_did(self, did: str) -> DIDInfo: - """Find info for a local DID. - - Args: - did: The DID for which to get info - - Returns: - A `DIDInfo` instance representing the found DID - - Raises: - WalletNotFoundError: If the DID is not found - WalletError: If there is a libindy error - - """ - # TODO: inject context for did method registry support - method_name = did.split(":")[1] if did.startswith("did:") else SOV.method_name - method = SOV if method_name == SOV.method_name else KEY - key_type = ED25519 - - # If did key, the key type can differ - if method == KEY: - did_key = DIDKey.from_did(did) - key_type = did_key.key_type - - if key_type == ED25519: - return await self.__get_indy_local_did(method, key_type, did) - else: - return await self.__get_keypair_local_did(method, key_type, did) - - async def get_local_did_for_verkey(self, verkey: str) -> DIDInfo: - """Resolve a local DID from a verkey. 
- - Args: - verkey: The verkey for which to get the local DID - - Returns: - A `DIDInfo` instance representing the found DID - - Raises: - WalletNotFoundError: If the verkey is not found - - """ - - dids = await self.get_local_dids() - for info in dids: - if info.verkey == verkey: - return info - raise WalletNotFoundError("No DID defined for verkey: {}".format(verkey)) - - async def replace_local_did_metadata(self, did: str, metadata: dict): - """Replace metadata for a local DID. - - Args: - did: The DID for which to replace metadata - metadata: The new metadata - - """ - if not metadata: - metadata = {} - did_info = await self.get_local_did(did) # throw exception if undefined - - # ed25519 keys are handled by indy - if did_info.key_type == ED25519: - try: - await indy.did.set_did_metadata( - self.opened.handle, did, json.dumps(metadata) - ) - except IndyError as x_indy: - raise IndyErrorHandler.wrap_error( - x_indy, "Wallet {} error".format(self.opened.name), WalletError - ) from x_indy - # all other keys are handled by key pair - else: - key_pair_mgr = KeyPairStorageManager(IndySdkStorage(self.opened)) - await key_pair_mgr.update_key_pair_metadata( - verkey=did_info.verkey, metadata=metadata - ) - - async def get_public_did(self) -> DIDInfo: - """Retrieve the public DID. - - Returns: - The currently public `DIDInfo`, if any - - """ - - public_did = None - public_info = None - public_item = None - storage = IndySdkStorage(self.opened) - try: - public_item = await storage.get_record( - RECORD_TYPE_CONFIG, RECORD_NAME_PUBLIC_DID - ) - except StorageNotFoundError: - # populate public DID record - # this should only happen once, for an upgraded wallet - # the 'public' metadata flag is no longer used - dids = await self.get_local_dids() - for info in dids: - if info.metadata.get("public"): - public_did = info.did - public_info = info - break - try: - # even if public is not set, store a record - # to avoid repeated queries - await storage.add_record( - StorageRecord( - type=RECORD_TYPE_CONFIG, - id=RECORD_NAME_PUBLIC_DID, - value=json.dumps({"did": public_did}), - ) - ) - except StorageDuplicateError: - # another process stored the record first - public_item = await storage.get_record( - RECORD_TYPE_CONFIG, RECORD_NAME_PUBLIC_DID - ) - if public_item: - public_did = json.loads(public_item.value)["did"] - if public_did: - try: - public_info = await self.get_local_did(public_did) - except WalletNotFoundError: - pass - - return public_info - - async def set_public_did(self, did: Union[str, DIDInfo]) -> DIDInfo: - """Assign the public DID. 
- - Returns: - The updated `DIDInfo` - - """ - - if isinstance(did, str): - # will raise an exception if not found - info = await self.get_local_did(did) - else: - info = did - - public = await self.get_public_did() - if not public or public.did != info.did: - if not info.metadata.get("posted"): - metadata = {**info.metadata, "posted": True} - await self.replace_local_did_metadata(info.did, metadata) - info = info._replace(metadata=metadata) - storage = IndySdkStorage(self.opened) - await storage.update_record( - StorageRecord( - type=RECORD_TYPE_CONFIG, - id=RECORD_NAME_PUBLIC_DID, - value="{}", - ), - value=json.dumps({"did": info.did}), - tags=None, - ) - public = info - - return public - - async def set_did_endpoint( - self, - did: str, - endpoint: str, - ledger: BaseLedger, - endpoint_type: EndpointType = None, - write_ledger: bool = True, - endorser_did: str = None, - routing_keys: List[str] = None, - ): - """Update the endpoint for a DID in the wallet, send to ledger if posted. - - Args: - did: DID for which to set endpoint - endpoint: the endpoint to set, None to clear - ledger: the ledger to which to send endpoint update if - DID is public or posted - endpoint_type: the type of the endpoint/service. Only endpoint_type - 'endpoint' affects local wallet - """ - did_info = await self.get_local_did(did) - if did_info.method != SOV: - raise WalletError("Setting DID endpoint is only allowed for did:sov DIDs") - - metadata = {**did_info.metadata} - if not endpoint_type: - endpoint_type = EndpointType.ENDPOINT - if endpoint_type == EndpointType.ENDPOINT: - metadata[endpoint_type.indy] = endpoint - - wallet_public_didinfo = await self.get_public_did() - if ( - wallet_public_didinfo and wallet_public_didinfo.did == did - ) or did_info.metadata.get("posted"): - # if DID on ledger, set endpoint there first - if not ledger: - raise LedgerConfigError( - f"No ledger available but DID {did} is public: missing wallet-type?" - ) - if not ledger.read_only: - async with ledger: - attrib_def = await ledger.update_endpoint_for_did( - did, - endpoint, - endpoint_type, - write_ledger=write_ledger, - endorser_did=endorser_did, - routing_keys=routing_keys, - ) - if not write_ledger: - return attrib_def - - await self.replace_local_did_metadata(did, metadata) - - async def sign_message(self, message: bytes, from_verkey: str) -> bytes: - """Sign a message using the private key associated with a given verkey. 
- - Args: - message: Message bytes to sign - from_verkey: The verkey to use to sign - - Returns: - A signature - - Raises: - WalletError: If the message is not provided - WalletError: If the verkey is not provided - WalletError: If a libindy error occurs - - """ - if not message: - raise WalletError("Message not provided") - if not from_verkey: - raise WalletError("Verkey not provided") - - try: - key_info = await self.get_signing_key(from_verkey) - except WalletNotFoundError: - key_info = await self.get_local_did_for_verkey(from_verkey) - - # ed25519 keys are handled by indy - if key_info.key_type == ED25519: - try: - result = await indy.crypto.crypto_sign( - self.opened.handle, from_verkey, message - ) - except IndyError: - raise WalletError("Exception when signing message") - # other keys are handled outside of indy - else: - key_pair_mgr = KeyPairStorageManager(IndySdkStorage(self.opened)) - key_pair = await key_pair_mgr.get_key_pair(verkey=key_info.verkey) - result = sign_message( - message=message, - secret=b58_to_bytes(key_pair["secret_key"]), - key_type=key_info.key_type, - ) - - return result - - async def verify_message( - self, - message: Union[List[bytes], bytes], - signature: bytes, - from_verkey: str, - key_type: KeyType, - ) -> bool: - """Verify a signature against the public key of the signer. - - Args: - message: Message to verify - signature: Signature to verify - from_verkey: Verkey to use in verification - - Returns: - True if verified, else False - - Raises: - WalletError: If the verkey is not provided - WalletError: If the signature is not provided - WalletError: If the message is not provided - WalletError: If a libindy error occurs - - """ - if not from_verkey: - raise WalletError("Verkey not provided") - if not signature: - raise WalletError("Signature not provided") - if not message: - raise WalletError("Message not provided") - - # ed25519 keys are handled by indy - if key_type == ED25519: - try: - result = await indy.crypto.crypto_verify( - from_verkey, message, signature - ) - except IndyError as x_indy: - if x_indy.error_code == ErrorCode.CommonInvalidStructure: - result = False - else: - raise IndyErrorHandler.wrap_error( - x_indy, "Wallet {} error".format(self.opened.name), WalletError - ) from x_indy - return result - # all other keys (only bls12381g2 atm) are handled outside of indy - else: - return verify_signed_message( - message=message, - signature=signature, - verkey=b58_to_bytes(from_verkey), - key_type=key_type, - ) - - async def pack_message( - self, message: str, to_verkeys: Sequence[str], from_verkey: str = None - ) -> bytes: - """Pack a message for one or more recipients. - - Args: - message: The message to pack - to_verkeys: List of verkeys for which to pack - from_verkey: Sender verkey from which to pack - - Returns: - The resulting packed message bytes - - Raises: - WalletError: If no message is provided - WalletError: If a libindy error occurs - - """ - if message is None: - raise WalletError("Message not provided") - try: - result = await indy.crypto.pack_message( - self.opened.handle, message, to_verkeys, from_verkey - ) - except IndyError as x_indy: - raise IndyErrorHandler.wrap_error( - x_indy, "Exception when packing message", WalletError - ) from x_indy - - return result - - async def unpack_message(self, enc_message: bytes) -> Tuple[str, str, str]: - """Unpack a message. 
- - Args: - enc_message: The packed message bytes - - Returns: - A tuple: (message, from_verkey, to_verkey) - - Raises: - WalletError: If the message is not provided - WalletError: If a libindy error occurs - - """ - if not enc_message: - raise WalletError("Message not provided") - try: - unpacked_json = await indy.crypto.unpack_message( - self.opened.handle, enc_message - ) - except IndyError: - raise WalletError("Exception when unpacking message") - unpacked = json.loads(unpacked_json) - message = unpacked["message"] - to_verkey = unpacked.get("recipient_verkey", None) - from_verkey = unpacked.get("sender_verkey", None) - return message, from_verkey, to_verkey - - @classmethod - async def generate_wallet_key(self, seed: str = None) -> str: - """Generate a raw Indy wallet key.""" - return await indy.wallet.generate_wallet_key(seed) diff --git a/aries_cloudagent/wallet/key_pair.py b/aries_cloudagent/wallet/key_pair.py index b1e7013715..b87e2f56b3 100644 --- a/aries_cloudagent/wallet/key_pair.py +++ b/aries_cloudagent/wallet/key_pair.py @@ -1,9 +1,10 @@ """Key pair storage manager.""" import json -import uuid from typing import List, Mapping, Optional, Sequence +from uuid_utils import uuid4 + from ..storage.base import BaseStorage from ..storage.record import StorageRecord from .key_type import KeyType @@ -53,7 +54,7 @@ async def store_key_pair( KEY_PAIR_STORAGE_TYPE, json.dumps(data), {**tags, "verkey": verkey, "key_type": key_type.key_type}, - uuid.uuid4().hex, + uuid4().hex, ) await self._store.add_record(record) diff --git a/aries_cloudagent/wallet/routes.py b/aries_cloudagent/wallet/routes.py index 5dc222c0c7..7fb9c75ea5 100644 --- a/aries_cloudagent/wallet/routes.py +++ b/aries_cloudagent/wallet/routes.py @@ -1,5 +1,6 @@ """Wallet admin routes.""" +import asyncio import json import logging from typing import List, Optional, Tuple, Union @@ -10,6 +11,7 @@ from aries_cloudagent.connections.base_manager import BaseConnectionManager +from ..admin.decorators.auth import tenant_authentication from ..admin.request_context import AdminRequestContext from ..config.injection_context import InjectionContext from ..connections.models.conn_record import ConnRecord @@ -55,15 +57,23 @@ is_author_role, ) from ..resolver.base import ResolverError +from ..storage.base import BaseStorage from ..storage.error import StorageError, StorageNotFoundError +from ..storage.record import StorageRecord +from ..storage.type import RECORD_TYPE_ACAPY_UPGRADING from ..wallet.jwt import jwt_sign, jwt_verify from ..wallet.sd_jwt import sd_jwt_sign, sd_jwt_verify +from .anoncreds_upgrade import ( + UPGRADING_RECORD_IN_PROGRESS, + upgrade_wallet_to_anoncreds_if_requested, +) from .base import BaseWallet from .did_info import DIDInfo from .did_method import KEY, PEER2, PEER4, SOV, DIDMethod, DIDMethods, HolderDefinedDid from .did_posture import DIDPosture from .error import WalletError, WalletNotFoundError from .key_type import BLS12381G2, ED25519, KeyTypes +from .singletons import UpgradeInProgressSingleton from .util import EVENT_LISTENER_PATTERN LOGGER = logging.getLogger(__name__) @@ -425,6 +435,7 @@ def format_did_info(info: DIDInfo): @docs(tags=["wallet"], summary="List wallet DIDs") @querystring_schema(DIDListQueryStringSchema()) @response_schema(DIDListSchema, 200, description="") +@tenant_authentication async def wallet_did_list(request: web.BaseRequest): """Request handler for searching wallet DIDs. 
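wallet/routes.py now imports UPGRADING_RECORD_IN_PROGRESS and upgrade_wallet_to_anoncreds_if_requested from the anoncreds_upgrade module above. Condensed, the lifecycle those constants encode is: the admin route writes an "in progress" record, the background task flips it to "finished", and restart/polling paths read it back. A sketch using the storage API exactly as it appears in this diff; the session and profile plumbing is omitted, and `storage` is assumed to be a BaseStorage bound to an open session:

# Condensed sketch of the upgrade-record lifecycle from anoncreds_upgrade.py.
# Assumes `storage` is a BaseStorage instance from an open profile session.
from aries_cloudagent.storage.record import StorageRecord
from aries_cloudagent.storage.type import RECORD_TYPE_ACAPY_UPGRADING

UPGRADING_RECORD_IN_PROGRESS = "anoncreds_in_progress"
UPGRADING_RECORD_FINISHED = "anoncreds_finished"

async def mark_started(storage):
    # upgrade_anoncreds() adds this record before spawning the upgrade task
    await storage.add_record(
        StorageRecord(RECORD_TYPE_ACAPY_UPGRADING, UPGRADING_RECORD_IN_PROGRESS)
    )

async def mark_finished(storage):
    # finish_upgrading_record() rewrites the same record's value in place
    record = await storage.find_record(RECORD_TYPE_ACAPY_UPGRADING, tag_query={})
    await storage.update_record(record, UPGRADING_RECORD_FINISHED, {})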
@@ -532,6 +543,7 @@ async def wallet_did_list(request: web.BaseRequest): @docs(tags=["wallet"], summary="Create a local DID") @request_schema(DIDCreateSchema()) @response_schema(DIDResultSchema, 200, description="") +@tenant_authentication async def wallet_create_did(request: web.BaseRequest): """Request handler for creating a new local DID in the wallet.
@@ -653,6 +665,7 @@ async def wallet_create_did(request: web.BaseRequest): @docs(tags=["wallet"], summary="Fetch the current public DID") @response_schema(DIDResultSchema, 200, description="") +@tenant_authentication async def wallet_get_public_did(request: web.BaseRequest): """Request handler for fetching the current public DID.
@@ -683,6 +696,7 @@ async def wallet_get_public_did(request: web.BaseRequest): @querystring_schema(AttribConnIdMatchInfoSchema()) @querystring_schema(MediationIDSchema()) @response_schema(DIDResultSchema, 200, description="") +@tenant_authentication async def wallet_set_public_did(request: web.BaseRequest): """Request handler for setting the current public DID.
@@ -928,6 +942,7 @@ async def promote_wallet_public_did( @querystring_schema(CreateAttribTxnForEndorserOptionSchema()) @querystring_schema(AttribConnIdMatchInfoSchema()) @response_schema(WalletModuleResponseSchema(), description="") +@tenant_authentication async def wallet_set_did_endpoint(request: web.BaseRequest): """Request handler for setting an endpoint for a DID.
@@ -1046,6 +1061,7 @@ async def wallet_set_did_endpoint(request: web.BaseRequest): @docs(tags=["wallet"], summary="Create an EdDSA jws using did keys with a given payload") @request_schema(JWSCreateSchema) @response_schema(WalletModuleResponseSchema(), description="") +@tenant_authentication async def wallet_jwt_sign(request: web.BaseRequest): """Request handler for jws creation using did.
@@ -1082,6 +1098,7 @@ ) @request_schema(SDJWSCreateSchema) @response_schema(WalletModuleResponseSchema(), description="") +@tenant_authentication async def wallet_sd_jwt_sign(request: web.BaseRequest): """Request handler for sd-jws creation using did.
@@ -1118,6 +1135,7 @@ async def wallet_sd_jwt_sign(request: web.BaseRequest): @docs(tags=["wallet"], summary="Verify an EdDSA jws using did keys with a given JWS") @request_schema(JWSVerifySchema()) @response_schema(JWSVerifyResponseSchema(), 200, description="") +@tenant_authentication async def wallet_jwt_verify(request: web.BaseRequest): """Request handler for jws validation using did.
@@ -1151,6 +1169,7 @@ ) @request_schema(SDJWSVerifySchema()) @response_schema(SDJWSVerifyResponseSchema(), 200, description="") +@tenant_authentication async def wallet_sd_jwt_verify(request: web.BaseRequest): """Request handler for sd-jws validation using did.
@@ -1173,6 +1192,7 @@ async def wallet_sd_jwt_verify(request: web.BaseRequest): @docs(tags=["wallet"], summary="Query DID endpoint in wallet") @querystring_schema(DIDQueryStringSchema()) @response_schema(DIDEndpointSchema, 200, description="") +@tenant_authentication async def wallet_get_did_endpoint(request: web.BaseRequest): """Request handler for getting the current DID endpoint from the wallet.
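The hunks below add a POST /anoncreds/wallet/upgrade admin route, guarded by a wallet_name query parameter that must match the handling agent's wallet (otherwise the handler returns 400). Once registered, it could be exercised along these lines; the host, port, and API key are assumptions about a local deployment, not values taken from this diff:

# Hypothetical client call; assumes an admin server on localhost:8031 secured
# with an admin API key. The path and wallet_name parameter come from the
# route definition below.
import asyncio
import aiohttp

async def trigger_upgrade():
    async with aiohttp.ClientSession() as session:
        async with session.post(
            "http://localhost:8031/anoncreds/wallet/upgrade",
            params={"wallet_name": "base-wallet"},
            headers={"x-api-key": "adminkey"},  # assumed local admin key
        ) as resp:
            print(resp.status, await resp.json())

asyncio.run(trigger_upgrade())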
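The upgrade route also consults the process-wide caches defined in wallet/singletons.py further below: both classes override __new__ so every construction returns the same instance, and the wallets set is a class attribute shared across all references. A behavior sketch grounded in that definition:

# Behavior sketch of the singleton caches added in wallet/singletons.py below.
from aries_cloudagent.wallet.singletons import UpgradeInProgressSingleton

a = UpgradeInProgressSingleton()
b = UpgradeInProgressSingleton()
a.set_wallet("base-wallet")           # visible through every reference
assert a is b and b.wallets == {"base-wallet"}
b.remove_wallet("base-wallet")        # discard(): no error if the name is absent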
@@ -1206,6 +1226,7 @@ async def wallet_get_did_endpoint(request: web.BaseRequest): @docs(tags=["wallet"], summary="Rotate keypair for a DID not posted to the ledger") @querystring_schema(DIDQueryStringSchema()) @response_schema(WalletModuleResponseSchema(), description="") +@tenant_authentication async def wallet_rotate_did_keypair(request: web.BaseRequest): """Request handler for rotating local DID keypair. @@ -1241,6 +1262,74 @@ async def wallet_rotate_did_keypair(request: web.BaseRequest): return web.json_response({}) +class UpgradeVerificationSchema(OpenAPISchema): + """Parameters and validators for triggering an upgrade to anoncreds.""" + + wallet_name = fields.Str( + required=True, + metadata={ + "description": "Name of wallet to upgrade to anoncreds", + "example": "base-wallet", + }, + ) + + +class UpgradeResultSchema(OpenAPISchema): + """Result schema for upgrade.""" + + +@docs( + tags=["anoncreds - wallet upgrade"], + summary=""" + Upgrade the wallet from askar to anoncreds - Be very careful with this! You + cannot go back! See migration guide for more information. + """, +) +@querystring_schema(UpgradeVerificationSchema()) +@response_schema(UpgradeResultSchema(), description="") +@tenant_authentication +async def upgrade_anoncreds(request: web.BaseRequest): + """Request handler for triggering an upgrade to anoncreds. + + Args: + request: aiohttp request object + + Returns: + A JSON response with a success flag and message + + """ + context: AdminRequestContext = request["context"] + profile = context.profile + + if profile.settings.get("wallet.name") != request.query.get("wallet_name"): + raise web.HTTPBadRequest( + reason="Wallet name parameter does not match the agent which triggered the upgrade" # noqa: E501 + ) + + if profile.settings.get("wallet.type") == "askar-anoncreds": + raise web.HTTPBadRequest(reason="Wallet type is already anoncreds") + + async with profile.session() as session: + storage = session.inject(BaseStorage) + upgrading_record = StorageRecord( + RECORD_TYPE_ACAPY_UPGRADING, + UPGRADING_RECORD_IN_PROGRESS, + ) + await storage.add_record(upgrading_record) + is_subwallet = context.metadata and "wallet_id" in context.metadata + asyncio.create_task( + upgrade_wallet_to_anoncreds_if_requested(profile, is_subwallet) + ) + UpgradeInProgressSingleton().set_wallet(profile.name) + + return web.json_response( + { + "success": True, + "message": f"Upgrade to anoncreds has been triggered for wallet {profile.name}", # noqa: E501 + } + ) + + def register_events(event_bus: EventBus): """Subscribe to any events we need to support.""" event_bus.subscribe(EVENT_LISTENER_PATTERN, on_register_nym_event) @@ -1333,6 +1422,7 @@ async def register(app: web.Application): "/wallet/get-did-endpoint", wallet_get_did_endpoint, allow_head=False ), web.patch("/wallet/did/local/rotate-keypair", wallet_rotate_did_keypair), + web.post("/anoncreds/wallet/upgrade", upgrade_anoncreds), ] ) @@ -1356,3 +1446,13 @@ def post_process_routes(app: web.Application): }, } ) + app._state["swagger_dict"]["tags"].append( + { + "name": "anoncreds - wallet upgrade", + "description": "Anoncreds wallet upgrade", + "externalDocs": { + "description": "Specification", + "url": "https://hyperledger.github.io/anoncreds-spec", + }, + } + ) diff --git a/aries_cloudagent/wallet/singletons.py b/aries_cloudagent/wallet/singletons.py new file mode 100644 index 0000000000..9a7a91d057 --- /dev/null +++ b/aries_cloudagent/wallet/singletons.py @@ -0,0 +1,43 @@ +"""Module that contains singleton classes for wallet operations.""" + + +class
IsAnoncredsSingleton: + """Singleton class used as cache for anoncreds wallet-type queries.""" + + instance = None + wallets = set() + + def __new__(cls, *args, **kwargs): + """Create a new instance of the class.""" + if cls.instance is None: + cls.instance = super().__new__(cls) + return cls.instance + + def set_wallet(self, wallet: str): + """Set a wallet name.""" + self.wallets.add(wallet) + + def remove_wallet(self, wallet: str): + """Remove a wallet name.""" + self.wallets.discard(wallet) + + +class UpgradeInProgressSingleton: + """Singleton class used as cache for upgrade in progress.""" + + instance = None + wallets = set() + + def __new__(cls, *args, **kwargs): + """Create a new instance of the class.""" + if cls.instance is None: + cls.instance = super().__new__(cls) + return cls.instance + + def set_wallet(self, wallet: str): + """Set a wallet name.""" + self.wallets.add(wallet) + + def remove_wallet(self, wallet: str): + """Remove a wallet name.""" + self.wallets.discard(wallet) diff --git a/aries_cloudagent/wallet/tests/test_anoncreds_upgrade.py b/aries_cloudagent/wallet/tests/test_anoncreds_upgrade.py new file mode 100644 index 0000000000..00c52bc623 --- /dev/null +++ b/aries_cloudagent/wallet/tests/test_anoncreds_upgrade.py @@ -0,0 +1,406 @@ +import asyncio +from time import time +from unittest import IsolatedAsyncioTestCase + +from aries_cloudagent.tests import mock +from aries_cloudagent.wallet import singletons + +from ...anoncreds.issuer import CATEGORY_CRED_DEF_PRIVATE +from ...cache.base import BaseCache +from ...core.in_memory.profile import InMemoryProfile, InMemoryProfileSession +from ...indy.credx.issuer import CATEGORY_CRED_DEF_KEY_PROOF +from ...messaging.credential_definitions.util import CRED_DEF_SENT_RECORD_TYPE +from ...messaging.schemas.util import SCHEMA_SENT_RECORD_TYPE +from ...multitenant.base import BaseMultitenantManager +from ...multitenant.manager import MultitenantManager +from ...storage.base import BaseStorage +from ...storage.record import StorageRecord +from ...storage.type import ( + RECORD_TYPE_ACAPY_STORAGE_TYPE, + RECORD_TYPE_ACAPY_UPGRADING, + STORAGE_TYPE_VALUE_ANONCREDS, +) +from .. 
import anoncreds_upgrade + + +class TestAnoncredsUpgrade(IsolatedAsyncioTestCase): + def setUp(self) -> None: + self.profile = InMemoryProfile.test_profile( + settings={"wallet.type": "askar", "wallet.id": "test-wallet-id"} + ) + self.context = self.profile.context + self.context.injector.bind_instance( + BaseMultitenantManager, mock.MagicMock(MultitenantManager, autospec=True) + ) + self.context.injector.bind_instance( + BaseCache, mock.MagicMock(BaseCache, autospec=True) + ) + + @mock.patch.object(InMemoryProfileSession, "handle") + async def test_convert_records_to_anoncreds(self, mock_handle): + async with self.profile.session() as session: + storage = session.inject(BaseStorage) + mock_handle.fetch = mock.CoroutineMock(return_value=None) + + schema_id = "GHjSbphAcdsrZrLjSvsjMp:2:faber-simple:1.1" + schema_id_parts = schema_id.split(":") + schema_tags = { + "schema_id": schema_id, + "schema_issuer_did": schema_id_parts[0], + "schema_name": schema_id_parts[-2], + "schema_version": schema_id_parts[-1], + "epoch": str(int(time())), + } + await storage.add_record( + StorageRecord(SCHEMA_SENT_RECORD_TYPE, schema_id, schema_tags) + ) + + credential_definition_id = "GHjSbphAcdsrZrLjSvsjMp:3:CL:8:default" + cred_def_tags = { + "schema_id": schema_id, + "schema_issuer_did": schema_id_parts[0], + "schema_name": schema_id_parts[-2], + "schema_version": schema_id_parts[-1], + "issuer_did": "GHjSbphAcdsrZrLjSvsjMp", + "cred_def_id": credential_definition_id, + "epoch": str(int(time())), + } + await storage.add_record( + StorageRecord( + CRED_DEF_SENT_RECORD_TYPE, credential_definition_id, cred_def_tags + ) + ) + storage.get_record = mock.CoroutineMock( + side_effect=[ + StorageRecord( + CATEGORY_CRED_DEF_PRIVATE, + {"p_key": {"p": "123...782", "q": "234...456"}, "r_key": None}, + {}, + ), + StorageRecord( + CATEGORY_CRED_DEF_KEY_PROOF, + {"c": "103...961", "xz_cap": "563...205", "xr_cap": []}, + {}, + ), + ] + ) + anoncreds_upgrade.IndyLedgerRequestsExecutor = mock.MagicMock() + anoncreds_upgrade.IndyLedgerRequestsExecutor.return_value.get_ledger_for_identifier = mock.CoroutineMock( + return_value=( + None, + mock.MagicMock( + get_schema=mock.CoroutineMock( + return_value={ + "attrNames": [ + "name", + "age", + ], + }, + ), + get_credential_definition=mock.CoroutineMock( + return_value={ + "type": "CL", + "tag": "default", + "value": { + "primary": { + "n": "123", + }, + }, + }, + ), + ), + ) + ) + + with mock.patch.object( + anoncreds_upgrade, "upgrade_and_delete_schema_records" + ), mock.patch.object( + anoncreds_upgrade, "upgrade_and_delete_cred_def_records" + ): + await anoncreds_upgrade.convert_records_to_anoncreds(self.profile) + + @mock.patch.object(InMemoryProfileSession, "handle") + async def test_retry_converting_records(self, mock_handle): + mock_handle.fetch = mock.CoroutineMock(return_value=None) + with mock.patch.object( + anoncreds_upgrade, "convert_records_to_anoncreds", mock.CoroutineMock() + ) as mock_convert_records_to_anoncreds: + mock_convert_records_to_anoncreds.side_effect = [ + Exception("Error"), + Exception("Error"), + None, + ] + async with self.profile.session() as session: + storage = session.inject(BaseStorage) + upgrading_record = StorageRecord( + RECORD_TYPE_ACAPY_UPGRADING, + anoncreds_upgrade.UPGRADING_RECORD_IN_PROGRESS, + ) + await storage.add_record(upgrading_record) + await anoncreds_upgrade.retry_converting_records( + self.profile, upgrading_record, 0 + ) + + assert mock_convert_records_to_anoncreds.call_count == 3 + storage_type_record = await 
storage.find_record( + RECORD_TYPE_ACAPY_STORAGE_TYPE, tag_query={} + ) + upgrading_record = await storage.find_record( + RECORD_TYPE_ACAPY_UPGRADING, tag_query={} + ) + assert storage_type_record.value == STORAGE_TYPE_VALUE_ANONCREDS + assert ( + upgrading_record.value + == anoncreds_upgrade.UPGRADING_RECORD_FINISHED + ) + assert "test-profile" in singletons.IsAnoncredsSingleton().wallets + + @mock.patch.object(InMemoryProfileSession, "handle") + async def test_upgrade_wallet_to_anoncreds(self, mock_handle): + mock_handle.fetch = mock.CoroutineMock(return_value=None) + + # upgrading record not present + await anoncreds_upgrade.upgrade_wallet_to_anoncreds_if_requested(self.profile) + + # upgrading record present + async with self.profile.session() as session: + storage = session.inject(BaseStorage) + await storage.add_record( + StorageRecord( + RECORD_TYPE_ACAPY_UPGRADING, + anoncreds_upgrade.UPGRADING_RECORD_IN_PROGRESS, + ) + ) + await anoncreds_upgrade.upgrade_wallet_to_anoncreds_if_requested( + self.profile + ) + storage_type_record = await storage.find_record( + RECORD_TYPE_ACAPY_STORAGE_TYPE, tag_query={} + ) + upgrading_record = await storage.find_record( + RECORD_TYPE_ACAPY_UPGRADING, tag_query={} + ) + assert storage_type_record.value == STORAGE_TYPE_VALUE_ANONCREDS + assert upgrading_record.value == anoncreds_upgrade.UPGRADING_RECORD_FINISHED + assert "test-profile" in singletons.IsAnoncredsSingleton().wallets + + # retry called on exception + with mock.patch.object( + anoncreds_upgrade, + "convert_records_to_anoncreds", + mock.CoroutineMock(side_effect=[Exception("Error")]), + ), mock.patch.object( + anoncreds_upgrade, "retry_converting_records", mock.CoroutineMock() + ) as mock_retry_converting_records: + async with self.profile.session() as session: + storage = session.inject(BaseStorage) + upgrading_record = await storage.find_record( + RECORD_TYPE_ACAPY_UPGRADING, tag_query={} + ) + await storage.update_record( + upgrading_record, anoncreds_upgrade.UPGRADING_RECORD_IN_PROGRESS, {} + ) + await anoncreds_upgrade.upgrade_wallet_to_anoncreds_if_requested( + self.profile + ) + assert mock_retry_converting_records.called + + async def test_set_storage_type_to_anoncreds_no_existing_record(self): + await anoncreds_upgrade.finish_upgrade(self.profile) + _, storage_type_record = next(iter(self.profile.records.items())) + assert storage_type_record.value == STORAGE_TYPE_VALUE_ANONCREDS + + async def test_set_storage_type_to_anoncreds_has_existing_record(self): + async with self.profile.session() as session: + storage = session.inject(BaseStorage) + await storage.add_record( + StorageRecord( + RECORD_TYPE_ACAPY_STORAGE_TYPE, + "askar", + ) + ) + await anoncreds_upgrade.finish_upgrade(self.profile) + _, storage_type_record = next(iter(self.profile.records.items())) + assert storage_type_record.value == STORAGE_TYPE_VALUE_ANONCREDS + + async def test_update_if_subwallet_and_set_storage_type_with_subwallet(self): + + await anoncreds_upgrade.finish_upgrade_by_updating_profile_or_shutting_down( + self.profile, True + ) + _, storage_type_record = next(iter(self.profile.records.items())) + assert storage_type_record.value == STORAGE_TYPE_VALUE_ANONCREDS + assert self.profile.context.injector.get_provider( + BaseCache + )._instance.flush.called + + async def test_update_if_subwallet_and_set_storage_type_with_base_wallet(self): + + await anoncreds_upgrade.finish_upgrade_by_updating_profile_or_shutting_down( + self.profile, False + ) + _, storage_type_record = 
next(iter(self.profile.records.items())) + assert storage_type_record.value == STORAGE_TYPE_VALUE_ANONCREDS + + @mock.patch.object(InMemoryProfileSession, "handle") + async def test_failed_upgrade(self, mock_handle): + mock_handle.fetch = mock.CoroutineMock(return_value=None) + async with self.profile.session() as session: + storage = session.inject(BaseStorage) + + schema_id = "GHjSbphAcdsrZrLjSvsjMp:2:faber-simple:1.1" + schema_id_parts = schema_id.split(":") + schema_tags = { + "schema_id": schema_id, + "schema_issuer_did": schema_id_parts[0], + "schema_name": schema_id_parts[-2], + "schema_version": schema_id_parts[-1], + "epoch": str(int(time())), + } + await storage.add_record( + StorageRecord(SCHEMA_SENT_RECORD_TYPE, schema_id, schema_tags) + ) + await storage.add_record( + StorageRecord( + RECORD_TYPE_ACAPY_STORAGE_TYPE, + "askar", + ) + ) + await storage.add_record( + StorageRecord( + RECORD_TYPE_ACAPY_UPGRADING, + "true", + ) + ) + + credential_definition_id = "GHjSbphAcdsrZrLjSvsjMp:3:CL:8:default" + cred_def_tags = { + "schema_id": schema_id, + "schema_issuer_did": schema_id_parts[0], + "schema_name": schema_id_parts[-2], + "schema_version": schema_id_parts[-1], + "issuer_did": "GHjSbphAcdsrZrLjSvsjMp", + "cred_def_id": credential_definition_id, + "epoch": str(int(time())), + } + await storage.add_record( + StorageRecord( + CRED_DEF_SENT_RECORD_TYPE, credential_definition_id, cred_def_tags + ) + ) + storage.get_record = mock.CoroutineMock( + side_effect=[ + StorageRecord( + CATEGORY_CRED_DEF_PRIVATE, + {"p_key": {"p": "123...782", "q": "234...456"}, "r_key": None}, + {}, + ), + StorageRecord( + CATEGORY_CRED_DEF_KEY_PROOF, + {"c": "103...961", "xz_cap": "563...205", "xr_cap": []}, + {}, + ), + ] + ) + anoncreds_upgrade.IndyLedgerRequestsExecutor = mock.MagicMock() + anoncreds_upgrade.IndyLedgerRequestsExecutor.return_value.get_ledger_for_identifier = mock.CoroutineMock( + return_value=( + None, + mock.MagicMock( + get_schema=mock.CoroutineMock( + return_value={ + "attrNames": [ + "name", + "age", + ], + }, + ), + get_credential_definition=mock.CoroutineMock( + return_value={ + "type": "CL", + "tag": "default", + "value": { + "primary": { + "n": "123", + }, + }, + }, + ), + ), + ) + ) + + with mock.patch.object( + anoncreds_upgrade, "upgrade_and_delete_schema_records" + ), mock.patch.object( + anoncreds_upgrade, "upgrade_and_delete_cred_def_records" + ), mock.patch.object( + InMemoryProfileSession, "rollback" + ) as mock_rollback, mock.patch.object( + InMemoryProfileSession, + "commit", + ) as mock_commit, mock.patch.object( + # Don't wait for the retry sleeps, to speed up the test + asyncio, "sleep" + ): + """ + Only test schema and cred def records failing to upgrade; the other record types + are hard to mock, and the upgrade logic under test is the same for all of them.
+ """ + + # Schemas fails to upgrade + anoncreds_upgrade.upgrade_and_delete_schema_records = mock.CoroutineMock( + # Needs to fail 5 times because of the retry logic + side_effect=[ + Exception("Error"), + Exception("Error"), + Exception("Error"), + Exception("Error"), + Exception("Error"), + ] + ) + await anoncreds_upgrade.upgrade_wallet_to_anoncreds_if_requested( + self.profile + ) + assert mock_rollback.called + assert not mock_commit.called + # Upgrading record should not be deleted + with self.assertRaises(Exception): + await storage.find_record( + type_filter=RECORD_TYPE_ACAPY_UPGRADING, tag_query={} + ) + + storage_type_record = await storage.find_record( + type_filter=RECORD_TYPE_ACAPY_STORAGE_TYPE, tag_query={} + ) + # Storage type should not be updated + assert storage_type_record.value == "askar" + + # Cred_defs fails to upgrade + anoncreds_upgrade.upgrade_and_delete_cred_def_records = ( + mock.CoroutineMock( + side_effect=[ + Exception("Error"), + Exception("Error"), + Exception("Error"), + Exception("Error"), + Exception("Error"), + ] + ) + ) + await anoncreds_upgrade.upgrade_wallet_to_anoncreds_if_requested( + self.profile + ) + assert mock_rollback.called + assert not mock_commit.called + # Upgrading record should not be deleted + with self.assertRaises(Exception): + await storage.find_record( + type_filter=RECORD_TYPE_ACAPY_UPGRADING, tag_query={} + ) + + storage_type_record = await storage.find_record( + type_filter=RECORD_TYPE_ACAPY_STORAGE_TYPE, tag_query={} + ) + # Storage type should not be updated + assert storage_type_record.value == "askar" diff --git a/aries_cloudagent/wallet/tests/test_indy_wallet.py b/aries_cloudagent/wallet/tests/test_indy_wallet.py deleted file mode 100644 index 1da9d7968d..0000000000 --- a/aries_cloudagent/wallet/tests/test_indy_wallet.py +++ /dev/null @@ -1,904 +0,0 @@ -import json -import os -from typing import cast - -import indy.anoncreds -import indy.crypto -import indy.did -import indy.wallet -import pytest -from aries_cloudagent.tests import mock - -from ...config.injection_context import InjectionContext -from ...core.error import ProfileDuplicateError, ProfileError, ProfileNotFoundError -from ...core.in_memory import InMemoryProfile -from ...indy.sdk import wallet_setup as test_setup_module -from ...indy.sdk.profile import IndySdkProfile, IndySdkProfileManager -from ...indy.sdk.wallet_setup import IndyWalletConfig -from ...ledger.endpoint_type import EndpointType -from ...ledger.indy import IndySdkLedgerPool -from ...wallet.did_method import SOV, DIDMethods -from ...wallet.key_type import ED25519 -from .. import indy as test_module -from ..base import BaseWallet -from ..in_memory import InMemoryWallet -from ..indy import IndySdkWallet -from . 
import test_in_memory_wallet - - -@pytest.fixture() -async def in_memory_wallet(): - profile = InMemoryProfile.test_profile(bind={DIDMethods: DIDMethods()}) - wallet = InMemoryWallet(profile) - yield wallet - - -@pytest.fixture() -async def wallet(): - key = await IndySdkWallet.generate_wallet_key() - context = InjectionContext() - context.injector.bind_instance(IndySdkLedgerPool, IndySdkLedgerPool("name")) - context.injector.bind_instance(DIDMethods, DIDMethods()) - with mock.patch.object(IndySdkProfile, "_make_finalizer"): - profile = cast( - IndySdkProfile, - await IndySdkProfileManager().provision( - context, - { - "auto_recreate": True, - "auto_remove": True, - "name": "test-wallet", - "key": key, - "key_derivation_method": "RAW", # much slower tests with argon-hashed keys - }, - ), - ) - async with profile.session() as session: - yield session.inject(BaseWallet) - await profile.close() - - -@pytest.mark.indy -class TestIndySdkWallet(test_in_memory_wallet.TestInMemoryWallet): - """Apply all InMemoryWallet tests against IndySdkWallet""" - - @pytest.mark.asyncio - async def test_rotate_did_keypair_x(self, wallet: IndySdkWallet): - info = await wallet.create_local_did( - SOV, ED25519, self.test_seed, self.test_sov_did - ) - - with mock.patch.object( - indy.did, "replace_keys_start", mock.CoroutineMock() - ) as mock_repl_start: - mock_repl_start.side_effect = test_module.IndyError( - test_module.ErrorCode.CommonIOError, {"message": "outlier"} - ) - with pytest.raises(test_module.WalletError) as excinfo: - await wallet.rotate_did_keypair_start(self.test_sov_did) - assert "outlier" in str(excinfo.value) - - with mock.patch.object( - indy.did, "replace_keys_apply", mock.CoroutineMock() - ) as mock_repl_apply: - mock_repl_apply.side_effect = test_module.IndyError( - test_module.ErrorCode.CommonIOError, {"message": "outlier"} - ) - with pytest.raises(test_module.WalletError) as excinfo: - await wallet.rotate_did_keypair_apply(self.test_sov_did) - assert "outlier" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_create_signing_key_x(self, wallet: IndySdkWallet): - with mock.patch.object( - indy.crypto, "create_key", mock.CoroutineMock() - ) as mock_create_key: - mock_create_key.side_effect = test_module.IndyError( - test_module.ErrorCode.CommonIOError, {"message": "outlier"} - ) - with pytest.raises(test_module.WalletError) as excinfo: - await wallet.create_signing_key(ED25519) - assert "outlier" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_create_local_did_x(self, wallet: IndySdkWallet): - with mock.patch.object( - indy.did, "create_and_store_my_did", mock.CoroutineMock() - ) as mock_create: - mock_create.side_effect = test_module.IndyError( - test_module.ErrorCode.CommonIOError, {"message": "outlier"} - ) - with pytest.raises(test_module.WalletError) as excinfo: - await wallet.create_local_did(SOV, ED25519) - assert "outlier" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_set_did_endpoint_ledger(self, wallet: IndySdkWallet): - mock_ledger = mock.MagicMock( - read_only=False, update_endpoint_for_did=mock.CoroutineMock() - ) - info_pub = await wallet.create_public_did( - SOV, - ED25519, - ) - await wallet.set_did_endpoint(info_pub.did, "https://example.com", mock_ledger) - mock_ledger.update_endpoint_for_did.assert_called_once_with( - info_pub.did, - "https://example.com", - EndpointType.ENDPOINT, - endorser_did=None, - write_ledger=True, - routing_keys=None, - ) - info_pub2 = await wallet.get_public_did() - assert 
info_pub2.metadata["endpoint"] == "https://example.com" - - with pytest.raises(test_module.LedgerConfigError) as excinfo: - await wallet.set_did_endpoint(info_pub.did, "https://example.com", None) - assert "No ledger available" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_set_did_endpoint_ledger_with_routing_keys( - self, wallet: IndySdkWallet - ): - routing_keys = ["3YJCx3TqotDWFGv7JMR5erEvrmgu5y4FDqjR7sKWxgXn"] - mock_ledger = mock.MagicMock( - read_only=False, update_endpoint_for_did=mock.CoroutineMock() - ) - info_pub = await wallet.create_public_did(SOV, ED25519) - await wallet.set_did_endpoint( - info_pub.did, "https://example.com", mock_ledger, routing_keys=routing_keys - ) - - mock_ledger.update_endpoint_for_did.assert_called_once_with( - info_pub.did, - "https://example.com", - EndpointType.ENDPOINT, - endorser_did=None, - write_ledger=True, - routing_keys=routing_keys, - ) - - @pytest.mark.asyncio - async def test_set_did_endpoint_readonly_ledger(self, wallet: IndySdkWallet): - mock_ledger = mock.MagicMock( - read_only=True, update_endpoint_for_did=mock.CoroutineMock() - ) - info_pub = await wallet.create_public_did( - SOV, - ED25519, - ) - await wallet.set_did_endpoint(info_pub.did, "https://example.com", mock_ledger) - mock_ledger.update_endpoint_for_did.assert_not_called() - info_pub2 = await wallet.get_public_did() - assert info_pub2.metadata["endpoint"] == "https://example.com" - - with pytest.raises(test_module.LedgerConfigError) as excinfo: - await wallet.set_did_endpoint(info_pub.did, "https://example.com", None) - assert "No ledger available" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_get_signing_key_x(self, wallet: IndySdkWallet): - with mock.patch.object( - indy.crypto, "get_key_metadata", mock.CoroutineMock() - ) as mock_signing: - mock_signing.side_effect = test_module.IndyError( - test_module.ErrorCode.CommonIOError, {"message": "outlier"} - ) - with pytest.raises(test_module.WalletError) as excinfo: - await wallet.get_signing_key(None) - assert "Missing required input parameter: verkey" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_get_local_did_x(self, wallet: IndySdkWallet): - with mock.patch.object( - indy.did, "get_my_did_with_meta", mock.CoroutineMock() - ) as mock_my: - mock_my.side_effect = test_module.IndyError( - test_module.ErrorCode.CommonIOError, {"message": "outlier"} - ) - with pytest.raises(test_module.WalletError) as excinfo: - await wallet.get_local_did("did:sov") - assert "outlier" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_replace_local_did_metadata_x(self, wallet: IndySdkWallet): - info = await wallet.create_local_did( - SOV, - ED25519, - self.test_seed, - self.test_sov_did, - self.test_metadata, - ) - assert info.did == self.test_sov_did - assert info.verkey == self.test_ed25519_verkey - assert info.metadata == self.test_metadata - - with mock.patch.object( - indy.did, "set_did_metadata", mock.CoroutineMock() - ) as mock_set_did_metadata: - mock_set_did_metadata.side_effect = test_module.IndyError( - test_module.ErrorCode.CommonIOError, {"message": "outlier"} - ) - with pytest.raises(test_module.WalletError) as excinfo: - await wallet.replace_local_did_metadata(info.did, info.metadata) - assert "outlier" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_verify_message_x(self, wallet: IndySdkWallet): - with mock.patch.object( - indy.crypto, "crypto_verify", mock.CoroutineMock() - ) as mock_verify: - mock_verify.side_effect = test_module.IndyError( - 
test_module.ErrorCode.CommonIOError, {"message": "outlier"} - ) - with pytest.raises(test_module.WalletError) as excinfo: - await wallet.verify_message( - b"hello world", - b"signature", - self.test_ed25519_verkey, - ED25519, - ) - assert "outlier" in str(excinfo.value) - - mock_verify.side_effect = test_module.IndyError( # plain wrong - test_module.ErrorCode.CommonInvalidStructure - ) - assert not await wallet.verify_message( - b"hello world", b"signature", self.test_ed25519_verkey, ED25519 - ) - - @pytest.mark.asyncio - async def test_pack_message_x(self, wallet: IndySdkWallet): - with mock.patch.object( - indy.crypto, "pack_message", mock.CoroutineMock() - ) as mock_pack: - mock_pack.side_effect = test_module.IndyError( # outlier - test_module.ErrorCode.CommonIOError, {"message": "outlier"} - ) - with pytest.raises(test_module.WalletError) as excinfo: - await wallet.pack_message( - b"hello world", - [ - self.test_ed25519_verkey, - ], - ) - assert "outlier" in str(excinfo.value) - - -@pytest.mark.indy -class TestWalletCompat: - """Tests for wallet compatibility.""" - - test_seed = "testseed000000000000000000000001" - test_did = "55GkHamhTU1ZbTbV2ab9DE" - test_verkey = "3Dn1SJNPaCXcvvJvSbsFWP2xaCjMom3can8CQNhWrTRx" - test_message = "test message" - - @pytest.mark.asyncio - async def test_compare_pack_unpack(self, in_memory_wallet, wallet: IndySdkWallet): - """ - Ensure that python-based pack/unpack is compatible with indy-sdk implementation - """ - await in_memory_wallet.create_local_did(SOV, ED25519, self.test_seed) - py_packed = await in_memory_wallet.pack_message( - self.test_message, [self.test_verkey], self.test_verkey - ) - - await wallet.create_local_did(SOV, ED25519, self.test_seed) - packed = await wallet.pack_message( - self.test_message, [self.test_verkey], self.test_verkey - ) - - py_unpacked, from_vk, to_vk = await in_memory_wallet.unpack_message(packed) - assert self.test_message == py_unpacked - - unpacked, from_vk, to_vk = await wallet.unpack_message(py_packed) - assert self.test_message == unpacked - - @pytest.mark.asyncio - async def test_mock_coverage(self): - """ - Coverage through mock framework. 
- """ - wallet_key = await IndySdkWallet.generate_wallet_key() - storage_config_json = json.dumps({"url": "dummy"}) - storage_creds_json = json.dumps( - { - "account": "postgres", - "password": "mysecretpassword", - "admin_account": "postgres", - "admin_password": "mysecretpassword", - }, - ) - with mock.patch.object( - test_setup_module, - "load_postgres_plugin", - mock.MagicMock(), - ) as mock_load, mock.patch.object( - indy.wallet, "create_wallet", mock.CoroutineMock() - ) as mock_create, mock.patch.object( - indy.wallet, "open_wallet", mock.CoroutineMock() - ) as mock_open, mock.patch.object( - indy.anoncreds, "prover_create_master_secret", mock.CoroutineMock() - ) as mock_master, mock.patch.object( - indy.wallet, "close_wallet", mock.CoroutineMock() - ) as mock_close, mock.patch.object( - indy.wallet, "delete_wallet", mock.CoroutineMock() - ) as mock_delete: - fake_wallet = IndyWalletConfig( - { - "auto_recreate": True, - "auto_remove": False, - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": storage_config_json, - "storage_creds": storage_creds_json, - } - ) - mock_load.assert_called_once_with(storage_config_json, storage_creds_json) - assert fake_wallet.wallet_access - opened = await fake_wallet.create_wallet() - await opened.close() - await fake_wallet.remove_wallet() - - @pytest.mark.asyncio - async def test_mock_coverage_wallet_exists_x(self): - """ - Coverage through mock framework: raise on creation of existing wallet - """ - wallet_key = await IndySdkWallet.generate_wallet_key() - storage_config_json = json.dumps({"url": "dummy"}) - storage_creds_json = json.dumps( - { - "account": "postgres", - "password": "mysecretpassword", - "admin_account": "postgres", - "admin_password": "mysecretpassword", - }, - ) - with mock.patch.object( - test_setup_module, - "load_postgres_plugin", - mock.MagicMock(), - ) as mock_load, mock.patch.object( - indy.wallet, "create_wallet", mock.CoroutineMock() - ) as mock_create, mock.patch.object( - indy.wallet, "open_wallet", mock.CoroutineMock() - ) as mock_open, mock.patch.object( - indy.anoncreds, "prover_create_master_secret", mock.CoroutineMock() - ) as mock_master, mock.patch.object( - indy.wallet, "close_wallet", mock.CoroutineMock() - ) as mock_close, mock.patch.object( - indy.wallet, "delete_wallet", mock.CoroutineMock() - ) as mock_delete: - mock_create.side_effect = test_module.IndyError( - test_module.ErrorCode.WalletAlreadyExistsError - ) - fake_wallet = IndyWalletConfig( - { - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": storage_config_json, - "storage_creds": storage_creds_json, - } - ) - with pytest.raises(ProfileDuplicateError) as excinfo: - await fake_wallet.create_wallet() - - @pytest.mark.asyncio - async def test_mock_coverage_wallet_create_x(self): - """ - Coverage through mock framework: raise on creation outlier - """ - wallet_key = await IndySdkWallet.generate_wallet_key() - storage_config_json = json.dumps({"url": "dummy"}) - storage_creds_json = json.dumps( - { - "account": "postgres", - "password": "mysecretpassword", - "admin_account": "postgres", - "admin_password": "mysecretpassword", - }, - ) - with mock.patch.object( - test_setup_module, - "load_postgres_plugin", - mock.MagicMock(), - ) as mock_load, mock.patch.object( - indy.wallet, "create_wallet", mock.CoroutineMock() - ) as mock_create, mock.patch.object( - indy.wallet, 
"open_wallet", mock.CoroutineMock() - ) as mock_open, mock.patch.object( - indy.anoncreds, "prover_create_master_secret", mock.CoroutineMock() - ) as mock_master, mock.patch.object( - indy.wallet, "close_wallet", mock.CoroutineMock() - ) as mock_close, mock.patch.object( - indy.wallet, "delete_wallet", mock.CoroutineMock() - ) as mock_delete: - mock_create.side_effect = test_module.IndyError( - test_module.ErrorCode.CommonIOError, {"message": "outlier"} - ) - fake_wallet = IndyWalletConfig( - { - "auto_recreate": True, - "auto_remove": True, - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": storage_config_json, - "storage_creds": storage_creds_json, - } - ) - with pytest.raises(ProfileError) as excinfo: - await fake_wallet.create_wallet() - assert "outlier" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_mock_coverage_remove_x(self): - """ - Coverage through mock framework: exception on removal. - """ - wallet_key = await IndySdkWallet.generate_wallet_key() - storage_config_json = json.dumps({"url": "dummy"}) - storage_creds_json = json.dumps( - { - "account": "postgres", - "password": "mysecretpassword", - "admin_account": "postgres", - "admin_password": "mysecretpassword", - }, - ) - with mock.patch.object( - test_setup_module, - "load_postgres_plugin", - mock.MagicMock(), - ) as mock_load, mock.patch.object( - indy.wallet, "create_wallet", mock.CoroutineMock() - ) as mock_create, mock.patch.object( - indy.wallet, "open_wallet", mock.CoroutineMock() - ) as mock_open, mock.patch.object( - indy.anoncreds, "prover_create_master_secret", mock.CoroutineMock() - ) as mock_master, mock.patch.object( - indy.wallet, "close_wallet", mock.CoroutineMock() - ) as mock_close, mock.patch.object( - indy.wallet, "delete_wallet", mock.CoroutineMock() - ) as mock_delete: - mock_delete.side_effect = test_module.IndyError( - test_module.ErrorCode.CommonIOError, {"message": "outlier"} - ) - fake_wallet = IndyWalletConfig( - { - "auto_recreate": False, - "auto_remove": False, - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": storage_config_json, - "storage_creds": storage_creds_json, - } - ) - mock_load.assert_called_once_with(storage_config_json, storage_creds_json) - assert fake_wallet.wallet_access - opened = await fake_wallet.create_wallet() - await opened.close() - with pytest.raises(ProfileError) as excinfo: - await fake_wallet.remove_wallet() - assert "outlier" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_mock_coverage_not_found_after_creation(self): - """ - Coverage through mock framework: missing created wallet. 
- """ - wallet_key = await IndySdkWallet.generate_wallet_key() - storage_config_json = json.dumps({"url": "dummy"}) - storage_creds_json = json.dumps( - { - "account": "postgres", - "password": "mysecretpassword", - "admin_account": "postgres", - "admin_password": "mysecretpassword", - }, - ) - with mock.patch.object( - test_setup_module, - "load_postgres_plugin", - mock.MagicMock(), - ) as mock_load, mock.patch.object( - indy.wallet, "create_wallet", mock.CoroutineMock() - ) as mock_create, mock.patch.object( - indy.wallet, "open_wallet", mock.CoroutineMock() - ) as mock_open, mock.patch.object( - indy.anoncreds, "prover_create_master_secret", mock.CoroutineMock() - ) as mock_master, mock.patch.object( - indy.wallet, "close_wallet", mock.CoroutineMock() - ) as mock_close, mock.patch.object( - indy.wallet, "delete_wallet", mock.CoroutineMock() - ) as mock_delete: - mock_open.side_effect = test_module.IndyError( - test_module.ErrorCode.WalletNotFoundError, {"message": "outlier"} - ) - fake_wallet = IndyWalletConfig( - { - "auto_recreate": True, - "auto_remove": True, - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": storage_config_json, - "storage_creds": storage_creds_json, - } - ) - mock_load.assert_called_once_with(storage_config_json, storage_creds_json) - with pytest.raises(ProfileError) as excinfo: - await fake_wallet.create_wallet() - assert "not found" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_mock_coverage_open_not_found(self): - """ - Coverage through mock framework: missing wallet on open. - """ - wallet_key = await IndySdkWallet.generate_wallet_key() - storage_config_json = json.dumps({"url": "dummy"}) - storage_creds_json = json.dumps( - { - "account": "postgres", - "password": "mysecretpassword", - "admin_account": "postgres", - "admin_password": "mysecretpassword", - }, - ) - with mock.patch.object( - test_setup_module, - "load_postgres_plugin", - mock.MagicMock(), - ) as mock_load, mock.patch.object( - indy.wallet, "create_wallet", mock.CoroutineMock() - ) as mock_create, mock.patch.object( - indy.wallet, "open_wallet", mock.CoroutineMock() - ) as mock_open, mock.patch.object( - indy.anoncreds, "prover_create_master_secret", mock.CoroutineMock() - ) as mock_master, mock.patch.object( - indy.wallet, "close_wallet", mock.CoroutineMock() - ) as mock_close, mock.patch.object( - indy.wallet, "delete_wallet", mock.CoroutineMock() - ) as mock_delete: - mock_open.side_effect = test_module.IndyError( - test_module.ErrorCode.WalletNotFoundError, {"message": "outlier"} - ) - fake_wallet = IndyWalletConfig( - { - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": storage_config_json, - "storage_creds": storage_creds_json, - } - ) - mock_load.assert_called_once_with(storage_config_json, storage_creds_json) - with pytest.raises(ProfileNotFoundError) as excinfo: - await fake_wallet.open_wallet() - assert "outlier" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_mock_coverage_open_indy_already_open_x(self): - """ - Coverage through mock framework: indy thinks wallet is open, aca-py does not. 
- """ - wallet_key = await IndySdkWallet.generate_wallet_key() - storage_config_json = json.dumps({"url": "dummy"}) - storage_creds_json = json.dumps( - { - "account": "postgres", - "password": "mysecretpassword", - "admin_account": "postgres", - "admin_password": "mysecretpassword", - }, - ) - with mock.patch.object( - test_setup_module, - "load_postgres_plugin", - mock.MagicMock(), - ) as mock_load, mock.patch.object( - indy.wallet, "create_wallet", mock.CoroutineMock() - ) as mock_create, mock.patch.object( - indy.wallet, "open_wallet", mock.CoroutineMock() - ) as mock_open, mock.patch.object( - indy.anoncreds, "prover_create_master_secret", mock.CoroutineMock() - ) as mock_master, mock.patch.object( - indy.wallet, "close_wallet", mock.CoroutineMock() - ) as mock_close, mock.patch.object( - indy.wallet, "delete_wallet", mock.CoroutineMock() - ) as mock_delete: - mock_open.side_effect = test_module.IndyError( - test_module.ErrorCode.WalletAlreadyOpenedError, {"message": "outlier"} - ) - fake_wallet = IndyWalletConfig( - { - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": storage_config_json, - "storage_creds": storage_creds_json, - } - ) - mock_load.assert_called_once_with(storage_config_json, storage_creds_json) - with pytest.raises(ProfileError) as excinfo: - await fake_wallet.open_wallet() - assert "outlier" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_mock_coverage_open_x(self): - """ - Coverage through mock framework: outlier on wallet open. - """ - wallet_key = await IndySdkWallet.generate_wallet_key() - storage_config_json = json.dumps({"url": "dummy"}) - storage_creds_json = json.dumps( - { - "account": "postgres", - "password": "mysecretpassword", - "admin_account": "postgres", - "admin_password": "mysecretpassword", - }, - ) - with mock.patch.object( - test_setup_module, - "load_postgres_plugin", - mock.MagicMock(), - ) as mock_load, mock.patch.object( - indy.wallet, "create_wallet", mock.CoroutineMock() - ) as mock_create, mock.patch.object( - indy.wallet, "open_wallet", mock.CoroutineMock() - ) as mock_open, mock.patch.object( - indy.anoncreds, "prover_create_master_secret", mock.CoroutineMock() - ) as mock_master, mock.patch.object( - indy.wallet, "close_wallet", mock.CoroutineMock() - ) as mock_close, mock.patch.object( - indy.wallet, "delete_wallet", mock.CoroutineMock() - ) as mock_delete: - mock_open.side_effect = test_module.IndyError( - test_module.ErrorCode.CommonIOError, {"message": "outlier"} - ) - fake_wallet = IndyWalletConfig( - { - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": storage_config_json, - "storage_creds": storage_creds_json, - } - ) - mock_load.assert_called_once_with(storage_config_json, storage_creds_json) - with pytest.raises(ProfileError) as excinfo: - await fake_wallet.open_wallet() - assert "outlier" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_mock_coverage_open_master_secret_x(self): - """ - Coverage through mock framework: outlier on master secret creation - """ - wallet_key = await IndySdkWallet.generate_wallet_key() - storage_config_json = json.dumps({"url": "dummy"}) - storage_creds_json = json.dumps( - { - "account": "postgres", - "password": "mysecretpassword", - "admin_account": "postgres", - "admin_password": "mysecretpassword", - }, - ) - with mock.patch.object( - test_setup_module, - "load_postgres_plugin", - 
mock.MagicMock(), - ) as mock_load, mock.patch.object( - indy.wallet, "create_wallet", mock.CoroutineMock() - ) as mock_create, mock.patch.object( - indy.wallet, "open_wallet", mock.CoroutineMock() - ) as mock_open, mock.patch.object( - indy.anoncreds, "prover_create_master_secret", mock.CoroutineMock() - ) as mock_master, mock.patch.object( - indy.wallet, "close_wallet", mock.CoroutineMock() - ) as mock_close, mock.patch.object( - indy.wallet, "delete_wallet", mock.CoroutineMock() - ) as mock_delete: - mock_master.side_effect = test_module.IndyError( - test_module.ErrorCode.CommonIOError, {"message": "outlier"} - ) - fake_wallet = IndyWalletConfig( - { - "auto_recreate": True, - "auto_remove": True, - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": storage_config_json, - "storage_creds": storage_creds_json, - } - ) - mock_load.assert_called_once_with(storage_config_json, storage_creds_json) - with pytest.raises(ProfileError) as excinfo: - await fake_wallet.create_wallet() - assert "outlier" in str(excinfo.value) - - @pytest.mark.asyncio - async def test_mock_coverage_open_master_secret_exists(self): - """ - Coverage through mock framework: open, master secret exists (OK). - """ - wallet_key = await IndySdkWallet.generate_wallet_key() - storage_config_json = json.dumps({"url": "dummy"}) - storage_creds_json = json.dumps( - { - "account": "postgres", - "password": "mysecretpassword", - "admin_account": "postgres", - "admin_password": "mysecretpassword", - }, - ) - with mock.patch.object( - test_setup_module, - "load_postgres_plugin", - mock.MagicMock(), - ) as mock_load, mock.patch.object( - indy.wallet, "create_wallet", mock.CoroutineMock() - ) as mock_create, mock.patch.object( - indy.wallet, "open_wallet", mock.CoroutineMock() - ) as mock_open, mock.patch.object( - indy.anoncreds, "prover_create_master_secret", mock.CoroutineMock() - ) as mock_master, mock.patch.object( - indy.wallet, "close_wallet", mock.CoroutineMock() - ) as mock_close, mock.patch.object( - indy.wallet, "delete_wallet", mock.CoroutineMock() - ) as mock_delete: - mock_master.side_effect = test_module.IndyError( - test_module.ErrorCode.AnoncredsMasterSecretDuplicateNameError - ) - fake_wallet = IndyWalletConfig( - { - "auto_recreate": True, - "auto_remove": True, - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": storage_config_json, - "storage_creds": storage_creds_json, - } - ) - mock_load.assert_called_once_with(storage_config_json, storage_creds_json) - assert fake_wallet.wallet_access - opened = await fake_wallet.create_wallet() - assert opened.master_secret_id == fake_wallet.name - await opened.close() - await fake_wallet.remove_wallet() - - # TODO get these to run in docker ci/cd - @pytest.mark.asyncio - @pytest.mark.postgres - async def test_postgres_wallet_works(self): - """ - Ensure that postgres wallet operations work (create and open wallet, create did, drop wallet) - """ - postgres_url = os.environ.get("POSTGRES_URL") - if not postgres_url: - pytest.fail("POSTGRES_URL not configured") - - wallet_key = await IndySdkWallet.generate_wallet_key() - postgres_wallet = IndyWalletConfig( - { - "auto_recreate": True, - "auto_remove": True, - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": '{"url":"' + postgres_url + '"}', - "storage_creds": 
'{"account":"postgres","password":"mysecretpassword","admin_account":"postgres","admin_password":"mysecretpassword"}', - } - ) - assert postgres_wallet.wallet_access - opened = await postgres_wallet.create_wallet() - wallet = IndySdkWallet(opened) - - await wallet.create_local_did(SOV, ED25519, self.test_seed) - py_packed = await wallet.pack_message( - self.test_message, [self.test_verkey], self.test_verkey - ) - - await wallet.close() - await postgres_wallet.remove_wallet() - - # TODO get these to run in docker ci/cd - @pytest.mark.asyncio - @pytest.mark.postgres - async def test_postgres_wallet_scheme_works(self): - """ - Ensure that postgres wallet operations work (create and open wallet, create did, drop wallet) - """ - postgres_url = os.environ.get("POSTGRES_URL") - if not postgres_url: - pytest.fail("POSTGRES_URL not configured") - - wallet_key = await IndySdkWallet.generate_wallet_key() - postgres_wallet = IndyWalletConfig( - { - "auto_recreate": True, - "auto_remove": True, - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": '{"url":"' - + postgres_url - + '", "wallet_scheme":"MultiWalletSingleTable"}', - "storage_creds": '{"account":"postgres","password":"mysecretpassword","admin_account":"postgres","admin_password":"mysecretpassword"}', - } - ) - assert postgres_wallet.wallet_access - opened = await postgres_wallet.create_wallet() - - with pytest.raises(ProfileError) as excinfo: - await postgres_wallet.create_wallet() - assert "Wallet was not removed" in str(excinfo.value) - - wallet = IndySdkWallet(opened) - await wallet.create_local_did(SOV, ED25519, self.test_seed) - py_packed = await wallet.pack_message( - self.test_message, [self.test_verkey], self.test_verkey - ) - - await wallet.close() - await postgres_wallet.remove_wallet() - - # TODO get these to run in docker ci/cd - @pytest.mark.asyncio - @pytest.mark.postgres - async def test_postgres_wallet_scheme2_works(self): - """ - Ensure that postgres wallet operations work (create and open wallet, create did, drop wallet) - """ - postgres_url = os.environ.get("POSTGRES_URL") - if not postgres_url: - pytest.fail("POSTGRES_URL not configured") - - wallet_key = await IndySdkWallet.generate_wallet_key() - postgres_wallet = IndyWalletConfig( - { - "auto_recreate": True, - "auto_remove": True, - "name": "test_pg_wallet", - "key": wallet_key, - "key_derivation_method": "RAW", - "storage_type": "postgres_storage", - "storage_config": '{"url":"' - + postgres_url - + '", "wallet_scheme":"MultiWalletSingleTableSharedPool"}', - "storage_creds": '{"account":"postgres","password":"mysecretpassword","admin_account":"postgres","admin_password":"mysecretpassword"}', - } - ) - opened = await postgres_wallet.create_wallet() - wallet = IndySdkWallet(opened) - - await wallet.create_local_did(SOV, ED25519, self.test_seed) - py_packed = await wallet.pack_message( - self.test_message, [self.test_verkey], self.test_verkey - ) - - await wallet.close() - await postgres_wallet.remove_wallet() diff --git a/aries_cloudagent/wallet/tests/test_routes.py b/aries_cloudagent/wallet/tests/test_routes.py index f2b756de23..ba8fc4db4d 100644 --- a/aries_cloudagent/wallet/tests/test_routes.py +++ b/aries_cloudagent/wallet/tests/test_routes.py @@ -3,6 +3,7 @@ from aiohttp.web import HTTPForbidden from aries_cloudagent.tests import mock +from aries_cloudagent.wallet import singletons from ...admin.request_context import AdminRequestContext from ...core.in_memory import 
InMemoryProfile @@ -11,6 +12,7 @@ from ...wallet.did_method import SOV, DIDMethod, DIDMethods, HolderDefinedDid from ...wallet.key_type import ED25519, KeyTypes from .. import routes as test_module +from ..anoncreds_upgrade import UPGRADING_RECORD_IN_PROGRESS from ..base import BaseWallet from ..did_info import DIDInfo from ..did_posture import DIDPosture @@ -27,7 +29,9 @@ class TestWalletRoutes(IsolatedAsyncioTestCase): def setUp(self): self.wallet = mock.create_autospec(BaseWallet) self.session_inject = {BaseWallet: self.wallet} - self.profile = InMemoryProfile.test_profile() + self.profile = InMemoryProfile.test_profile( + settings={"admin.admin_api_key": "secret-key"} + ) self.context = AdminRequestContext.test_context( self.session_inject, self.profile ) @@ -41,6 +45,7 @@ def setUp(self): match_info={}, query={}, __getitem__=lambda _, k: self.request_dict[k], + headers={"x-api-key": "secret-key"}, ) self.test_did = "did" @@ -1006,6 +1011,25 @@ async def test_rotate_did_keypair_x(self): with self.assertRaises(test_module.web.HTTPBadRequest): await test_module.wallet_rotate_did_keypair(self.request) + async def test_upgrade_anoncreds(self): + self.profile.settings["wallet.name"] = "test_wallet" + self.request.query = {"wallet_name": "not_test_wallet"} + with self.assertRaises(test_module.web.HTTPBadRequest): + await test_module.upgrade_anoncreds(self.request) + + self.request.query = {"wallet_name": "not_test_wallet"} + self.profile.settings["wallet.type"] = "askar-anoncreds" + with self.assertRaises(test_module.web.HTTPBadRequest): + await test_module.upgrade_anoncreds(self.request) + + self.request.query = {"wallet_name": "test_wallet"} + self.profile.settings["wallet.type"] = "askar" + await test_module.upgrade_anoncreds(self.request) + _, upgrade_record = next(iter(self.profile.records.items())) + assert upgrade_record.type == "acapy_upgrading" + assert upgrade_record.value == UPGRADING_RECORD_IN_PROGRESS + assert "test-profile" in singletons.UpgradeInProgressSingleton().wallets + async def test_register(self): mock_app = mock.MagicMock() mock_app.add_routes = mock.MagicMock() diff --git a/conftest.py b/conftest.py index cedd929864..5ab1eeb148 100644 --- a/conftest.py +++ b/conftest.py @@ -24,41 +24,6 @@ def stop(self): self.inner and self.inner.stop() -def stub_indy() -> Stub: - # detect indy module - try: - from indy.libindy import _cdll - - _cdll() - - return Stub(None) - except ImportError: - print("Skipping Indy-specific tests: python3-indy module not installed.") - except OSError: - print( - "Skipping Indy-specific tests: libindy shared library could not be loaded."
- ) - - modules = {} - package_name = "indy" - modules[package_name] = mock.MagicMock() - for mod in [ - "anoncreds", - "blob_storage", - "crypto", - "did", - "error", - "pool", - "ledger", - "non_secrets", - "pairwise", - "wallet", - ]: - submod = f"{package_name}.{mod}" - modules[submod] = mock.MagicMock() - return Stub(mock.patch.dict(sys.modules, modules)) - - def stub_anoncreds() -> Stub: # detect anoncreds library try: @@ -200,7 +165,6 @@ def pytest_sessionstart(session): { "anoncreds": stub_anoncreds(), "askar": stub_askar(), - "indy": stub_indy(), "indy_credx": stub_indy_credx(), "indy_vdr": stub_indy_vdr(), "ursa_bbs_signatures": stub_ursa_bbs_signatures(), diff --git a/demo/docker-agent/Dockerfile.acapy b/demo/docker-agent/Dockerfile.acapy index f309b40950..f4b6d5e9de 100644 --- a/demo/docker-agent/Dockerfile.acapy +++ b/demo/docker-agent/Dockerfile.acapy @@ -1,4 +1,4 @@ -FROM ghcr.io/hyperledger/aries-cloudagent-python:py3.9-0.9.0 +FROM ghcr.io/hyperledger/aries-cloudagent-python:py3.9-0.12.1 USER root diff --git a/demo/docker-test/db/Dockerfile b/demo/docker-test/db/Dockerfile index 5d4e896508..a3ac2fa1d5 100644 --- a/demo/docker-test/db/Dockerfile +++ b/demo/docker-test/db/Dockerfile @@ -1,3 +1,3 @@ -FROM postgres:14 +FROM postgres:16 COPY ./init-postgres-role.sh /docker-entrypoint-initdb.d/init-postgres-role.sh CMD ["docker-entrypoint.sh", "postgres"] \ No newline at end of file diff --git a/demo/docker-test/docker-compose-agent.yml b/demo/docker-test/docker-compose-agent.yml index dbf43eb976..818ed8afdc 100644 --- a/demo/docker-test/docker-compose-agent.yml +++ b/demo/docker-test/docker-compose-agent.yml @@ -2,8 +2,8 @@ version: "3" services: vcr-agent: build: - context: ../../ - dockerfile: docker/Dockerfile.run + context: ../ + dockerfile: docker-agent/Dockerfile.acapy ports: - 8010:8010 - 8001:8001 diff --git a/demo/elk-stack/extensions/curator/Dockerfile b/demo/elk-stack/extensions/curator/Dockerfile index 6cb8cdc681..c1f04d1092 100644 --- a/demo/elk-stack/extensions/curator/Dockerfile +++ b/demo/elk-stack/extensions/curator/Dockerfile @@ -1,4 +1,4 @@ -FROM untergeek/curator:8.0.2 +FROM untergeek/curator:8.0.15 USER root diff --git a/demo/features/0160-connection.feature b/demo/features/0160-connection.feature index 6befe878cd..a68e651711 100644 --- a/demo/features/0160-connection.feature +++ b/demo/features/0160-connection.feature @@ -12,7 +12,7 @@ Feature: RFC 0160 Aries agent connection functions Then "Acme" has an active connection And "Bob" has an active connection - @GHA @UnqualifiedDids + @PR @Release @UnqualifiedDids Examples: | Acme_capabilities | Acme_extra | Bob_capabilities | Bob_extra | | --public-did --did-exchange --emit-did-peer-2 | | --did-exchange --emit-did-peer-2 | | @@ -40,7 +40,7 @@ Feature: RFC 0160 Aries agent connection functions | --did-exchange --emit-did-peer-4 | | --emit-did-peer-4 | | | --did-exchange --reuse-connections --emit-did-peer-4 | | --reuse-connections --emit-did-peer-4 | | - @GHA @MultiUseConnectionReuse + @PR @Release @MultiUseConnectionReuse Examples: | Acme_capabilities | Acme_extra | Bob_capabilities | Bob_extra | | --did-exchange --multi-use-invitations --emit-did-peer-2 | | --emit-did-peer-2 | | @@ -56,7 +56,7 @@ Feature: RFC 0160 Aries agent connection functions | --public-did --did-exchange --multi-use-invitations --emit-did-peer-4 | | --did-exchange --emit-did-peer-2 | | | --public-did --did-exchange --multi-use-invitations --reuse-connections --emit-did-peer-2 | | --did-exchange --reuse-connections --emit-did-peer-4 | | 
- @GHA @WalletType_Askar_AnonCreds + @PR @Release @WalletType_Askar_AnonCreds Examples: | Acme_capabilities | Acme_extra | Bob_capabilities | Bob_extra | | --public-did --did-exchange --wallet-type askar-anoncreds --emit-did-peer-2 | | --did-exchange --wallet-type askar-anoncreds --emit-did-peer-2 | | diff --git a/demo/features/0453-issue-credential.feature b/demo/features/0453-issue-credential.feature index 27f51cf062..fbe1c49c07 100644 --- a/demo/features/0453-issue-credential.feature +++ b/demo/features/0453-issue-credential.feature @@ -12,37 +12,37 @@ Feature: RFC 0453 Aries agent issue credential When "Acme" offers a credential with data Then "Bob" has the credential issued - @GHA @WalletType_Askar @BasicTest + @Release @WalletType_Askar @BasicTest Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Acme_extra | Bob_extra | | --public-did --did-exchange | --did-exchange | driverslicense | Data_DL_NormalizedValues | | | - @GHA @WalletType_Askar @AltTests + @Release @WalletType_Askar @AltTests Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Acme_extra | Bob_extra | | --public-did | | driverslicense | Data_DL_NormalizedValues | | | | --public-did --mediation | --mediation | driverslicense | Data_DL_NormalizedValues | | | | --public-did --multitenant | --multitenant --log-file | driverslicense | Data_DL_NormalizedValues | | | - @GHA @WalletType_Askar_AnonCreds @BasicTest + @Release @WalletType_Askar_AnonCreds @BasicTest Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Acme_extra | Bob_extra | | --public-did --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense | Data_DL_NormalizedValues | | | | --public-did --wallet-type askar-anoncreds --cred-type vc_di | --wallet-type askar-anoncreds | driverslicense | Data_DL_NormalizedValues | | | - @GHA @WalletType_Askar_AnonCreds @AltTests + @Release @WalletType_Askar_AnonCreds @AltTests Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Acme_extra | Bob_extra | | --public-did --wallet-type askar-anoncreds | | driverslicense | Data_DL_NormalizedValues | | | | --public-did | --wallet-type askar-anoncreds | driverslicense | Data_DL_NormalizedValues | | | - @GHA @WalletType_Askar @ConnectionTests + @PR @Release @WalletType_Askar @ConnectionTests Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Acme_extra | Bob_extra | | --did-exchange --emit-did-peer-4 | --did-exchange --emit-did-peer-4 | driverslicense | Data_DL_NormalizedValues | | | | --did-exchange --reuse-connections --emit-did-peer-4 | --did-exchange --reuse-connections --emit-did-peer-4 | driverslicense | Data_DL_NormalizedValues | | | - @GHA @WalletType_Askar_AnonCreds @ConnectionTests + @PR @Release @WalletType_Askar_AnonCreds @ConnectionTests Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Acme_extra | Bob_extra | | --did-exchange --wallet-type askar-anoncreds --emit-did-peer-4 | --did-exchange --wallet-type askar-anoncreds --emit-did-peer-4 | driverslicense | Data_DL_NormalizedValues | | | @@ -79,7 +79,7 @@ Feature: RFC 0453 Aries agent issue credential And "Acme" offers and deletes a credential with data Then "Bob" has the exchange abandoned - @GHA @WalletType_Askar + @PR @Release @WalletType_Askar Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --public-did | | driverslicense | Data_DL_NormalizedValues | @@ -106,7 +106,7 @@ 
Feature: RFC 0453 Aries agent issue credential And "Acme" is ready to issue a credential for <Schema_name> When "Bob" requests a credential with data <Credential_data> from "Acme" it fails - @GHA @WalletType_Askar + @PR @Release @WalletType_Askar Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --public-did | | driverslicense | Data_DL_NormalizedValues | @@ -137,7 +137,7 @@ Feature: RFC 0453 Aries agent issue credential Then "Bob" has the json-ld credential issued And "Acme" has the exchange completed - @GHA @WalletType_Askar + @PR @Release @WalletType_Askar Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --public-did --cred-type json-ld | | driverslicense | Data_DL_NormalizedValues | @@ -166,15 +166,20 @@ Feature: RFC 0453 Aries agent issue credential When "Acme" offers "Bob" a json-ld credential with data <Credential_data> Then "Bob" has the json-ld credential issued - @GHA @WalletType_Askar + @PR @Release @WalletType_Askar Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --public-did --cred-type json-ld | | driverslicense | Data_DL_NormalizedValues | + + @Release @WalletType_Askar + Examples: + | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --public-did --cred-type json-ld --did-exchange | --did-exchange | driverslicense | Data_DL_NormalizedValues | | --public-did --cred-type json-ld --mediation | --mediation | driverslicense | Data_DL_NormalizedValues | | --public-did --cred-type json-ld --multitenant --log-file | --multitenant | driverslicense | Data_DL_NormalizedValues | - @GHA @WalletType_Askar_AnonCreds + + @Release @WalletType_Askar_AnonCreds Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --public-did --cred-type json-ld --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense | Data_DL_NormalizedValues | @@ -199,15 +204,19 @@ Feature: RFC 0453 Aries agent issue credential When "Bob" requests a json-ld credential with data <Credential_data> from "Acme" Then "Bob" has the json-ld credential issued - @GHA @WalletType_Askar + @PR @Release @WalletType_Askar Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --public-did --cred-type json-ld | | driverslicense | Data_DL_NormalizedValues | + + @Release @WalletType_Askar + Examples: + | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --public-did --cred-type json-ld --did-exchange | --did-exchange | driverslicense | Data_DL_NormalizedValues | | --public-did --cred-type json-ld --mediation | --mediation | driverslicense | Data_DL_NormalizedValues | | --public-did --cred-type json-ld --multitenant | --multitenant | driverslicense | Data_DL_NormalizedValues | - @GHA @WalletType_Askar_AnonCreds + @PR @Release @WalletType_Askar_AnonCreds Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --public-did --cred-type json-ld --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense | Data_DL_NormalizedValues | @@ -224,14 +233,14 @@ Feature: RFC 0453 Aries agent issue credential Then "Acme" revokes the credential And "Bob" has the credential issued - @GHA @WalletType_Askar + @Release @WalletType_Askar Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --revocation --public-did | | driverslicense | Data_DL_NormalizedValues | | --revocation --public-did --did-exchange | --did-exchange | driverslicense | Data_DL_NormalizedValues | | --revocation --public-did --multitenant |
--multitenant | driverslicense | Data_DL_NormalizedValues | - @WalletType_Askar_AnonCreds + @Release @WalletType_Askar_AnonCreds Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --revocation --public-did --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense | Data_DL_NormalizedValues | diff --git a/demo/features/0454-present-proof.feature b/demo/features/0454-present-proof.feature index 8787541703..f40552e003 100644 --- a/demo/features/0454-present-proof.feature +++ b/demo/features/0454-present-proof.feature @@ -12,21 +12,29 @@ Feature: RFC 0454 Aries agent present proof When "Faber" sends a request for proof presentation <Proof_request> to "Bob" Then "Faber" has the proof verified - @GHA @WalletType_Askar + @PR @Release @WalletType_Askar Examples: | issuer | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Proof_request | | Faber | --public-did | | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | + + @Release @WalletType_Askar + Examples: + | issuer | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Proof_request | | Faber | --public-did --did-exchange | --did-exchange | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | - @GHA @WalletType_Askar_AnonCreds + @PR @Release @WalletType_Askar_AnonCreds Examples: | issuer | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Proof_request | | Faber | --public-did --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | + + @Release @WalletType_Askar_AnonCreds + Examples: + | issuer | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Proof_request | | Faber | --public-did --wallet-type askar-anoncreds | | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | | Faber | --public-did | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | - @T001-RFC0454-DID-PEER @GHA + @T001-RFC0454-DID-PEER Scenario Outline: Present Proof where the prover does not propose a presentation of the proof and is acknowledged Given we have "2" agents | name | role | capabilities | extra | @@ -38,12 +46,12 @@ Feature: RFC 0454 Aries agent present proof When "Faber" sends a request for proof presentation <Proof_request> to "Bob" Then "Faber" has the proof verified - @WalletType_Askar + @PR @Release @WalletType_Askar Examples: | issuer | Acme_capabilities | Acme_extra | Bob_capabilities | Bob_extra | Schema_name | Credential_data | Proof_request | | Faber | --public-did --did-exchange --emit-did-peer-2 | | --did-exchange --emit-did-peer-2 | | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | - @WalletType_Askar_AnonCreds + @PR @Release @WalletType_Askar_AnonCreds Examples: | issuer | Acme_capabilities | Acme_extra | Bob_capabilities | Bob_extra | Schema_name | Credential_data | Proof_request | | Faber | --public-did --wallet-type askar-anoncreds --emit-did-peer-2 | | --wallet-type askar-anoncreds --emit-did-peer-2 | | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | @@ -90,13 +98,17 @@ Feature: RFC 0454 Aries agent present proof When "Faber" sends a request for json-ld proof presentation <Proof_request> to "Bob" Then "Faber" has the proof verified - @GHA @WalletType_Askar + @PR @Release @WalletType_Askar Examples: | issuer | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Proof_request | | Acme | --public-did --cred-type json-ld | | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | + + @Release
@WalletType_Askar + Examples: + | issuer | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Proof_request | | Faber | --public-did --cred-type json-ld --did-exchange | --did-exchange | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | - @WalletType_Askar_AnonCreds + @PR @Release @WalletType_Askar_AnonCreds Examples: | issuer | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Proof_request | | Faber | --public-did --cred-type json-ld --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | @@ -115,13 +127,17 @@ Feature: RFC 0454 Aries agent present proof When "Faber" sends a request for proof presentation <Proof_request> to "Bob" Then "Faber" has the proof verification fail - @GHA @WalletType_Askar + @PR @Release @WalletType_Askar Examples: | issuer | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Proof_request | | Faber | --revocation --public-did | | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | + + @Release @WalletType_Askar + Examples: + | issuer | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Proof_request | | Faber | --revocation --public-did --did-exchange | --did-exchange | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | - @GHA @WalletType_Askar_AnonCreds + @PR @Release @WalletType_Askar_AnonCreds Examples: | issuer | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Proof_request | | Faber | --revocation --public-did --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | @@ -172,12 +188,12 @@ Feature: RFC 0454 Aries agent present proof When "Faber" sends a request for proof presentation <Proof_request> to "Bob" Then "Faber" has the proof verified - @GHA @WalletType_Askar + @PR @Release @WalletType_Askar Examples: | issuer1 | Acme1_capabilities | issuer2 | Acme2_capabilities | Bob_cap | Schema_name_1 | Credential_data_1 | Schema_name_2 | Credential_data_2 | Proof_request | | Acme1 | --revocation --public-did | Acme2 | --public-did | | driverslicense_v2 | Data_DL_MaxValues | health_id | Data_DL_MaxValues | DL_age_over_19_v2_with_health_id | - @WalletType_Askar_AnonCreds + @PR @Release @WalletType_Askar_AnonCreds Examples: | issuer1 | Acme1_capabilities | issuer2 | Acme2_capabilities | Bob_cap | Schema_name_1 | Credential_data_1 | Schema_name_2 | Credential_data_2 | Proof_request | | Acme1 | --revocation --public-did --wallet-type askar-anoncreds | Acme2 | --public-did --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | health_id | Data_DL_MaxValues | DL_age_over_19_v2_with_health_id | @@ -198,12 +214,12 @@ Feature: RFC 0454 Aries agent present proof When "Faber" sends a request for proof presentation <Proof_request> to "Bob" Then "Faber" has the proof verification fail - @WalletType_Askar + @PR @Release @WalletType_Askar Examples: | issuer1 | Acme1_capabilities | issuer2 | Acme2_capabilities | Bob_cap | Schema_name_1 | Credential_data_1 | Schema_name_2 | Credential_data_2 | Proof_request | | Acme1 | --revocation --public-did | Acme2 | --public-did | | driverslicense_v2 | Data_DL_MaxValues | health_id | Data_DL_MaxValues | DL_age_over_19_v2_with_health_id_r2 | - @WalletType_Askar_AnonCreds + @TODO @WalletType_Askar_AnonCreds Examples: | issuer1 | Acme1_capabilities | issuer2 | Acme2_capabilities | Bob_cap | Schema_name_1 | Credential_data_1 | Schema_name_2 | Credential_data_2 | Proof_request | |
Acme1 | --revocation --public-did --wallet-type askar-anoncreds | Acme2 | --public-did --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | health_id | Data_DL_MaxValues | DL_age_over_19_v2_with_health_id_r2 | @@ -225,13 +241,13 @@ Feature: RFC 0454 Aries agent present proof When "Faber" sends a request for proof presentation <Proof_request> to "Bob" Then "Faber" has the proof verification fail - @GHA @WalletType_Askar + @PR @Release @WalletType_Askar Examples: | issuer1 | Acme1_capabilities | issuer2 | Acme2_capabilities | Bob_cap | Schema_name_1 | Credential_data_1 | Schema_name_2 | Credential_data_2 | Proof_request | | Acme1 | --revocation --public-did | Acme2 | --public-did | | driverslicense_v2 | Data_DL_MaxValues | health_id | Data_DL_MaxValues | DL_age_over_19_v2_with_health_id | | Acme1 | --revocation --public-did | Acme2 | --public-did | | driverslicense_v2 | Data_DL_MaxValues | health_id | Data_DL_MaxValues | DL_age_over_19_v2_with_health_id_r2 | - @WalletType_Askar_AnonCreds + @PR @Release @WalletType_Askar_AnonCreds Examples: | issuer1 | Acme1_capabilities | issuer2 | Acme2_capabilities | Bob_cap | Schema_name_1 | Credential_data_1 | Schema_name_2 | Credential_data_2 | Proof_request | | Acme1 | --revocation --public-did --wallet-type askar-anoncreds | Acme2 | --public-did --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | health_id | Data_DL_MaxValues | DL_age_over_19_v2_with_health_id | @@ -253,12 +269,37 @@ Feature: RFC 0454 Aries agent present proof When "Faber" sends a request with explicit revocation status for proof presentation <Proof_request> to "Bob" Then "Faber" has the proof verified - @GHA @WalletType_Askar + @PR @Release @WalletType_Askar Examples: | issuer1 | Acme1_capabilities | issuer2 | Acme2_capabilities | Bob_cap | Schema_name_1 | Credential_data_1 | Schema_name_2 | Credential_data_2 | Proof_request | | Acme1 | --revocation --public-did | Acme2 | --public-did | | driverslicense_v2 | Data_DL_MaxValues | health_id | Data_DL_MaxValues | DL_age_over_19_v2_with_health_id_no_revoc | - @WalletType_Askar_AnonCreds + @PR @Release @WalletType_Askar_AnonCreds Examples: | issuer1 | Acme1_capabilities | issuer2 | Acme2_capabilities | Bob_cap | Schema_name_1 | Credential_data_1 | Schema_name_2 | Credential_data_2 | Proof_request | | Acme1 | --revocation --public-did --wallet-type askar-anoncreds | Acme2 | --public-did --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | health_id | Data_DL_MaxValues | DL_age_over_19_v2_with_health_id_no_revoc | + + @T003-RFC0454.4 + Scenario Outline: Present Proof for a credential where multiple credentials are issued and all but one are revoked + Given we have "3" agents + | name | role | capabilities | + | Acme1 | issuer1 | <Acme1_capabilities> | + | Faber | verifier | | + | Bob | prover | <Bob_cap> | + And "<issuer1>" and "Bob" have an existing connection + And "Bob" has an issued <Schema_name_1> credential <Credential_data_1> from "<issuer1>" + And "<issuer1>" revokes the credential + And "Bob" has another issued <Schema_name_1> credential <Credential_data_1> from "<issuer1>" + And "Faber" and "Bob" have an existing connection + When "Faber" sends a request with explicit revocation status for proof presentation <Proof_request> to "Bob" + Then "Faber" has the proof verified + + @WalletType_Askar + Examples: + | issuer1 | Acme1_capabilities | Bob_cap | Schema_name_1 | Credential_data_1 | Proof_request | + | Acme1 | --revocation --public-did | | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | + + @WalletType_Askar_AnonCreds + Examples: + | issuer1 |
Acme1_capabilities | Bob_cap | Schema_name_1 | Credential_data_1 | Proof_request | + | Acme1 | --revocation --public-did --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | diff --git a/demo/features/0586-sign-transaction.feature b/demo/features/0586-sign-transaction.feature index 3a57b76e08..7fc2198115 100644 --- a/demo/features/0586-sign-transaction.feature +++ b/demo/features/0586-sign-transaction.feature @@ -19,7 +19,7 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions Then "Bob" can write the transaction to the ledger And "Bob" has written the schema <Schema_name> to the ledger - @GHA + @Release Examples: | Acme_capabilities | Bob_capabilities | Schema_name | | --did-exchange | --did-exchange | driverslicense | @@ -27,13 +27,13 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions | --multitenant | --multitenant | driverslicense | | --mediation --multitenant | --mediation --multitenant | driverslicense | - @Mulitledger + @TODO @Mulitledger Examples: | Acme_capabilities | Bob_capabilities | Schema_name | | --multitenant --multi-ledger | --multitenant --multi-ledger | driverslicense | | --multitenant --multi-ledger --revocation | --multitenant --multi-ledger --revocation | driverslicense | - @WalletType_Askar_AnonCreds @GHA + @Release @WalletType_Askar_AnonCreds Examples: | Acme_capabilities | Bob_capabilities | Schema_name | | --wallet-type askar-anoncreds | | driverslicense | @@ -101,12 +101,16 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions And "Bob" authors a revocation registry entry publishing transaction Then "Acme" can verify the credential from "Bob" was revoked - @GHA + @Release + Examples: + | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | + | --revocation --public-did --multitenant | --revocation --multitenant | driverslicense | Data_DL_NormalizedValues | + + @Release Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --revocation --public-did --did-exchange | --revocation --did-exchange | driverslicense | Data_DL_NormalizedValues | | --revocation --public-did --mediation | --revocation --mediation | driverslicense | Data_DL_NormalizedValues | - | --revocation --public-did --multitenant | --revocation --multitenant | driverslicense | Data_DL_NormalizedValues | | --revocation --public-did --mediation --multitenant | --revocation --mediation --multitenant | driverslicense | Data_DL_NormalizedValues | @Mulitledger @@ -114,7 +118,7 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions | Acme_capabilities | Bob_capabilties | Schema_name | Credential_data | | --multitenant --multi-ledger --revocation --public-did | --multitenant --multi-ledger --revocation | driverslicense | Data_DL_NormalizedValues | - @WalletType_Askar_AnonCreds @GHA + @PR @Release @WalletType_Askar_AnonCreds Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --revocation --public-did --did-exchange | --revocation --did-exchange --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | @@ -201,7 +205,7 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | - @T003.1-RFC0586 @GHA + @T003.1-RFC0586 Scenario Outline: endorse a schema and cred def
transaction, write to the ledger, issue and revoke a credential, with auto endorsing workflow Given we have "2" agents | name | role | capabilities | @@ -224,13 +228,23 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions And "Bob" authors a revocation registry entry publishing transaction with txn endorsement Then "Acme" can verify the credential from "Bob" was revoked + @PR @Release + Examples: + | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | + | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation | driverslicense | Data_DL_NormalizedValues | + + @Release Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation | driverslicense | Data_DL_NormalizedValues | | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation --multitenant | driverslicense | Data_DL_NormalizedValues | - @WalletType_Askar_AnonCreds + @PR @Release @WalletType_Askar_AnonCreds Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | + + @Release @WalletType_Askar_AnonCreds + Examples: + | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation --multitenant --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | diff --git a/demo/features/revocation-api.feature b/demo/features/revocation-api.feature index 7145e2f71b..ebfdb120de 100644 --- a/demo/features/revocation-api.feature +++ b/demo/features/revocation-api.feature @@ -1,6 +1,6 @@ Feature: ACA-Py Revocation API - @Revoc-api @GHA + @Revoc-api @PR @Release Scenario Outline: Using revocation api, issue and revoke credentials Given we have "3" agents | name | role | capabilities | @@ -19,7 +19,7 @@ Feature: ACA-Py Revocation API #| Acme | --revocation --public-did | | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | | Acme | --revocation --public-did --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | - @Revoc-api @GHA + @Revoc-api @PR @Release Scenario Outline: Using revocation api, issue, revoke credentials and publish Given we have "3" agents | name | role | capabilities | @@ -44,7 +44,7 @@ Feature: ACA-Py Revocation API | Acme | --revocation --public-did --multitenant | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | | Acme | --revocation --public-did --multitenant --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | - @Revoc-api.x @GHA-Anoncreds-break + @Revoc-api.x @PR-Anoncreds-break Scenario Outline: Without endorser: issue, revoke credentials, manually create revocation registries Given we have "3" agents | name | role | capabilities | @@ -76,7 +76,7 @@ Feature: ACA-Py Revocation API | Acme | --revocation --public-did --did-exchange --multitenant --wallet-type askar | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | | Acme | --revocation --public-did --did-exchange --multitenant --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | 
DL_age_over_19_v2 | - @Revoc-api @GHA + @Revoc-api @PR @Release Scenario Outline: Using revocation api, rotate revocation Given we have "3" agents | name | role | capabilities | @@ -93,7 +93,7 @@ Feature: ACA-Py Revocation API #| Acme | --revocation --public-did | | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | | Acme | --revocation --public-did --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | - @Revoc-api @GHA + @Revoc-api @PR @Release Scenario Outline: Using revocation api, fill registry (need to run with "TAILS_FILE_COUNT": "4" env var) Given we have "2" agents | name | role | capabilities | diff --git a/demo/features/steps/0453-issue-credential.py b/demo/features/steps/0453-issue-credential.py index e702403d51..6dc33f428d 100644 --- a/demo/features/steps/0453-issue-credential.py +++ b/demo/features/steps/0453-issue-credential.py @@ -695,3 +695,22 @@ def step_impl(context, holder, schema_name, credential_data, issuer): + """" has the credential issued """ ) + + +@given( + '"{holder}" has another issued {schema_name} credential {credential_data} from "{issuer}"' +) +def step_impl(context, holder, schema_name, credential_data, issuer): + context.execute_steps( + # TODO possibly check that the requested schema is "active" (if there are multiple schemas) + ''' + When "''' + + issuer + + """" offers a credential with data """ + + credential_data + + ''' + Then "''' + + holder + + """" has the credential issued + """ + ) diff --git a/demo/features/steps/0586-sign-transaction.py b/demo/features/steps/0586-sign-transaction.py index da112e14d5..406db972a0 100644 --- a/demo/features/steps/0586-sign-transaction.py +++ b/demo/features/steps/0586-sign-transaction.py @@ -761,7 +761,6 @@ def step_impl(context, holder_name, issuer_name): "/credentials", params={}, ) - assert len(cred_list["results"]) == 1 cred_id = cred_list["results"][0]["referent"] revoc_status_bool = False diff --git a/demo/features/steps/upgrade.py b/demo/features/steps/upgrade.py new file mode 100644 index 0000000000..fe23f2570e --- /dev/null +++ b/demo/features/steps/upgrade.py @@ -0,0 +1,24 @@ +"""Steps for upgrading the wallet to support anoncreds.""" + +from bdd_support.agent_backchannel_client import ( + agent_container_POST, + async_sleep, +) +from behave import given, then + + +@given('"{issuer}" upgrades the wallet to anoncreds') +@then('"{issuer}" upgrades the wallet to anoncreds') +def step_impl(context, issuer): + """Upgrade the wallet to support anoncreds.""" + agent = context.active_agents[issuer] + agent_container_POST( + agent["agent"], + "/anoncreds/wallet/upgrade", + data={}, + params={ + "wallet_name": agent["agent"].agent.wallet_name, + }, + ) + + async_sleep(2.0) diff --git a/demo/features/upgrade.feature b/demo/features/upgrade.feature new file mode 100644 index 0000000000..259c7a485a --- /dev/null +++ b/demo/features/upgrade.feature @@ -0,0 +1,29 @@ +Feature: ACA-Py Anoncreds Upgrade + + @PR @Release + Scenario Outline: Using revocation api, issue, revoke credentials and publish + Given we have "3" agents + | name | role | capabilities | + | Acme | issuer | <Acme_capabilities> | + | Faber | verifier | | + | Bob | prover | <Bob_capabilities> | + And "<issuer>" and "Bob" have an existing connection + And "Bob" has an issued <Schema_name> credential <Credential_data> from "<issuer>" + And "<issuer>" has written the credential definition for <Schema_name> to the ledger + And "<issuer>" has written the revocation registry definition to the ledger + And "<issuer>" has written the revocation registry entry transaction to the ledger + And "<issuer>" revokes
the credential without publishing the entry + And "<issuer>" authors a revocation registry entry publishing transaction + And "Faber" and "Bob" have an existing connection + When "Faber" sends a request for proof presentation <Proof_request> to "Bob" + Then "Faber" has the proof verification fail + Then "Bob" can verify the credential from "<issuer>" was revoked + And "<issuer>" upgrades the wallet to anoncreds + And "Bob" has an issued <Schema_name> credential <Credential_data> from "<issuer>" + And "Bob" upgrades the wallet to anoncreds + And "Bob" has an issued <Schema_name> credential <Credential_data> from "<issuer>" + When "Faber" sends a request for proof presentation <Proof_request> to "Bob" + + Examples: + | issuer | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Proof_request | + | Acme | --revocation --public-did --multitenant | --multitenant | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | \ No newline at end of file diff --git a/demo/multi-demo/Dockerfile.acapy b/demo/multi-demo/Dockerfile.acapy index f309b40950..f4b6d5e9de 100644 --- a/demo/multi-demo/Dockerfile.acapy +++ b/demo/multi-demo/Dockerfile.acapy @@ -1,4 +1,4 @@ -FROM ghcr.io/hyperledger/aries-cloudagent-python:py3.9-0.9.0 +FROM ghcr.io/hyperledger/aries-cloudagent-python:py3.9-0.12.1 USER root diff --git a/demo/playground/Dockerfile.acapy b/demo/playground/Dockerfile.acapy index 956205df1f..d5b0647705 100644 --- a/demo/playground/Dockerfile.acapy +++ b/demo/playground/Dockerfile.acapy @@ -1,4 +1,4 @@ -FROM ghcr.io/hyperledger/aries-cloudagent-python:py3.9-0.10.4 +FROM ghcr.io/hyperledger/aries-cloudagent-python:py3.9-0.12.1 USER root diff --git a/demo/playground/examples/poetry.lock b/demo/playground/examples/poetry.lock index eb054b9b75..276a150f88 100644 --- a/demo/playground/examples/poetry.lock +++ b/demo/playground/examples/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "asynctest" @@ -13,112 +13,112 @@ files = [ [[package]] name = "certifi" -version = "2023.7.22" +version = "2024.6.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, + {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, ] [[package]] name = "charset-normalizer" -version = "3.3.1" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.1.tar.gz", hash = "sha256:d9137a876020661972ca6eec0766d81aef8a5627df628b664b234b73396e727e"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8aee051c89e13565c6bd366813c386939f8e928af93c29fda4af86d25b73d8f8"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:352a88c3df0d1fa886562384b86f9a9e27563d4704ee0e9d56ec6fcd270ea690"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:223b4d54561c01048f657fa6ce41461d5ad8ff128b9678cfe8b2ecd951e3f8a2"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f861d94c2a450b974b86093c6c027888627b8082f1299dfd5a4bae8e2292821"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1171ef1fc5ab4693c5d151ae0fdad7f7349920eabbaca6271f95969fa0756c2d"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28f512b9a33235545fbbdac6a330a510b63be278a50071a336afc1b78781b147"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0e842112fe3f1a4ffcf64b06dc4c61a88441c2f02f373367f7b4c1aa9be2ad5"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f9bc2ce123637a60ebe819f9fccc614da1bcc05798bbbaf2dd4ec91f3e08846"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f194cce575e59ffe442c10a360182a986535fd90b57f7debfaa5c845c409ecc3"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9a74041ba0bfa9bc9b9bb2cd3238a6ab3b7618e759b41bd15b5f6ad958d17605"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b578cbe580e3b41ad17b1c428f382c814b32a6ce90f2d8e39e2e635d49e498d1"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6db3cfb9b4fcecb4390db154e75b49578c87a3b9979b40cdf90d7e4b945656e1"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:debb633f3f7856f95ad957d9b9c781f8e2c6303ef21724ec94bea2ce2fcbd056"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-win32.whl", hash = "sha256:87071618d3d8ec8b186d53cb6e66955ef2a0e4fa63ccd3709c0c90ac5a43520f"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:e372d7dfd154009142631de2d316adad3cc1c36c32a38b16a4751ba78da2a397"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae4070f741f8d809075ef697877fd350ecf0b7c5837ed68738607ee0a2c572cf"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58e875eb7016fd014c0eea46c6fa92b87b62c0cb31b9feae25cbbe62c919f54d"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dbd95e300367aa0827496fe75a1766d198d34385a58f97683fe6e07f89ca3e3c"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de0b4caa1c8a21394e8ce971997614a17648f94e1cd0640fbd6b4d14cab13a72"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:985c7965f62f6f32bf432e2681173db41336a9c2611693247069288bcb0c7f8b"}, - {file = 
"charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a15c1fe6d26e83fd2e5972425a772cca158eae58b05d4a25a4e474c221053e2d"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae55d592b02c4349525b6ed8f74c692509e5adffa842e582c0f861751701a673"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be4d9c2770044a59715eb57c1144dedea7c5d5ae80c68fb9959515037cde2008"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:851cf693fb3aaef71031237cd68699dded198657ec1e76a76eb8be58c03a5d1f"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:31bbaba7218904d2eabecf4feec0d07469284e952a27400f23b6628439439fa7"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:871d045d6ccc181fd863a3cd66ee8e395523ebfbc57f85f91f035f50cee8e3d4"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:501adc5eb6cd5f40a6f77fbd90e5ab915c8fd6e8c614af2db5561e16c600d6f3"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f5fb672c396d826ca16a022ac04c9dce74e00a1c344f6ad1a0fdc1ba1f332213"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-win32.whl", hash = "sha256:bb06098d019766ca16fc915ecaa455c1f1cd594204e7f840cd6258237b5079a8"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:8af5a8917b8af42295e86b64903156b4f110a30dca5f3b5aedea123fbd638bff"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae8e5142dcc7a49168f4055255dbcced01dc1714a90a21f87448dc8d90617d1"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5b70bab78accbc672f50e878a5b73ca692f45f5b5e25c8066d748c09405e6a55"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ceca5876032362ae73b83347be8b5dbd2d1faf3358deb38c9c88776779b2e2f"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34d95638ff3613849f473afc33f65c401a89f3b9528d0d213c7037c398a51296"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9edbe6a5bf8b56a4a84533ba2b2f489d0046e755c29616ef8830f9e7d9cf5728"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6a02a3c7950cafaadcd46a226ad9e12fc9744652cc69f9e5534f98b47f3bbcf"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10b8dd31e10f32410751b3430996f9807fc4d1587ca69772e2aa940a82ab571a"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edc0202099ea1d82844316604e17d2b175044f9bcb6b398aab781eba957224bd"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b891a2f68e09c5ef989007fac11476ed33c5c9994449a4e2c3386529d703dc8b"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:71ef3b9be10070360f289aea4838c784f8b851be3ba58cf796262b57775c2f14"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:55602981b2dbf8184c098bc10287e8c245e351cd4fdcad050bd7199d5a8bf514"}, - {file = 
"charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:46fb9970aa5eeca547d7aa0de5d4b124a288b42eaefac677bde805013c95725c"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:520b7a142d2524f999447b3a0cf95115df81c4f33003c51a6ab637cbda9d0bf4"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-win32.whl", hash = "sha256:8ec8ef42c6cd5856a7613dcd1eaf21e5573b2185263d87d27c8edcae33b62a61"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:baec8148d6b8bd5cee1ae138ba658c71f5b03e0d69d5907703e3e1df96db5e41"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63a6f59e2d01310f754c270e4a257426fe5a591dc487f1983b3bbe793cf6bac6"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d6bfc32a68bc0933819cfdfe45f9abc3cae3877e1d90aac7259d57e6e0f85b1"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f3100d86dcd03c03f7e9c3fdb23d92e32abbca07e7c13ebd7ddfbcb06f5991f"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39b70a6f88eebe239fa775190796d55a33cfb6d36b9ffdd37843f7c4c1b5dc67"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e12f8ee80aa35e746230a2af83e81bd6b52daa92a8afaef4fea4a2ce9b9f4fa"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b6cefa579e1237ce198619b76eaa148b71894fb0d6bcf9024460f9bf30fd228"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:61f1e3fb621f5420523abb71f5771a204b33c21d31e7d9d86881b2cffe92c47c"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4f6e2a839f83a6a76854d12dbebde50e4b1afa63e27761549d006fa53e9aa80e"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:1ec937546cad86d0dce5396748bf392bb7b62a9eeb8c66efac60e947697f0e58"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:82ca51ff0fc5b641a2d4e1cc8c5ff108699b7a56d7f3ad6f6da9dbb6f0145b48"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:633968254f8d421e70f91c6ebe71ed0ab140220469cf87a9857e21c16687c034"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-win32.whl", hash = "sha256:c0c72d34e7de5604df0fde3644cc079feee5e55464967d10b24b1de268deceb9"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:63accd11149c0f9a99e3bc095bbdb5a464862d77a7e309ad5938fbc8721235ae"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5a3580a4fdc4ac05f9e53c57f965e3594b2f99796231380adb2baaab96e22761"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2465aa50c9299d615d757c1c888bc6fef384b7c4aec81c05a0172b4400f98557"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb7cd68814308aade9d0c93c5bd2ade9f9441666f8ba5aa9c2d4b389cb5e2a45"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e43805ccafa0a91831f9cd5443aa34528c0c3f2cc48c4cb3d9a7721053874b"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:854cc74367180beb327ab9d00f964f6d91da06450b0855cbbb09187bcdb02de5"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c15070ebf11b8b7fd1bfff7217e9324963c82dbdf6182ff7050519e350e7ad9f"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4c99f98fc3a1835af8179dcc9013f93594d0670e2fa80c83aa36346ee763d2"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fb765362688821404ad6cf86772fc54993ec11577cd5a92ac44b4c2ba52155b"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dced27917823df984fe0c80a5c4ad75cf58df0fbfae890bc08004cd3888922a2"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a66bcdf19c1a523e41b8e9d53d0cedbfbac2e93c649a2e9502cb26c014d0980c"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ecd26be9f112c4f96718290c10f4caea6cc798459a3a76636b817a0ed7874e42"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f70fd716855cd3b855316b226a1ac8bdb3caf4f7ea96edcccc6f484217c9597"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:17a866d61259c7de1bdadef418a37755050ddb4b922df8b356503234fff7932c"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-win32.whl", hash = "sha256:548eefad783ed787b38cb6f9a574bd8664468cc76d1538215d510a3cd41406cb"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:45f053a0ece92c734d874861ffe6e3cc92150e32136dd59ab1fb070575189c97"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bc791ec3fd0c4309a753f95bb6c749ef0d8ea3aea91f07ee1cf06b7b02118f2f"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8c61fb505c7dad1d251c284e712d4e0372cef3b067f7ddf82a7fa82e1e9a93"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2c092be3885a1b7899cd85ce24acedc1034199d6fca1483fa2c3a35c86e43041"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2000c54c395d9e5e44c99dc7c20a64dc371f777faf8bae4919ad3e99ce5253e"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cb50a0335382aac15c31b61d8531bc9bb657cfd848b1d7158009472189f3d62"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c30187840d36d0ba2893bc3271a36a517a717f9fd383a98e2697ee890a37c273"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe81b35c33772e56f4b6cf62cf4aedc1762ef7162a31e6ac7fe5e40d0149eb67"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0bf89afcbcf4d1bb2652f6580e5e55a840fdf87384f6063c4a4f0c95e378656"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:06cf46bdff72f58645434d467bf5228080801298fbba19fe268a01b4534467f5"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3c66df3f41abee950d6638adc7eac4730a306b022570f71dd0bd6ba53503ab57"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:cd805513198304026bd379d1d516afbf6c3c13f4382134a2c526b8b854da1c2e"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:9505dc359edb6a330efcd2be825fdb73ee3e628d9010597aa1aee5aa63442e97"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31445f38053476a0c4e6d12b047b08ced81e2c7c712e5a1ad97bc913256f91b2"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-win32.whl", hash = "sha256:bd28b31730f0e982ace8663d108e01199098432a30a4c410d06fe08fdb9e93f4"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:555fe186da0068d3354cdf4bbcbc609b0ecae4d04c921cc13e209eece7720727"}, - {file = "charset_normalizer-3.3.1-py3-none-any.whl", hash = "sha256:800561453acdecedaac137bf09cd719c7a440b6800ec182f077bb8e7025fb708"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] @@ -134,13 +134,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.3" +version = "1.2.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, ] [package.extras] @@ -170,24 +170,24 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = 
"sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -196,13 +196,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pytest" -version = "7.4.2" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, - {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -218,31 +218,31 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest-asyncio" -version = "0.21.1" +version = "0.23.7" description = "Pytest support for asyncio" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, - {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, + {file = "pytest_asyncio-0.23.7-py3-none-any.whl", hash = "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b"}, + {file = "pytest_asyncio-0.23.7.tar.gz", hash = "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268"}, ] [package.dependencies] -pytest = ">=7.0.0" +pytest = ">=7.0.0,<9" [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -268,22 +268,22 @@ files = [ [[package]] name = "urllib3" -version = "2.0.7" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, - {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "c9c1bb0fed9cb41eaa74390c7ba76d4e84a6fc5a0a849f770640a4dbcfb1fbca" +content-hash = "341ab30c68bf565ad15df85c008f204b909c4c60cf2aa0c46b2d42bd97287b29" diff --git a/demo/playground/examples/pyproject.toml b/demo/playground/examples/pyproject.toml index bfc662bbd4..fd4b622227 100644 --- a/demo/playground/examples/pyproject.toml +++ b/demo/playground/examples/pyproject.toml @@ -6,10 +6,10 @@ authors = ["Jason Sherman "] [tool.poetry.dependencies] python = "^3.9" -pytest = "^7.4.0" -pytest-asyncio = "^0.21.0" +pytest = "^7.4.4" +pytest-asyncio = "^0.23.7" asynctest = "^0.13.0" -requests = "^2.31.0" +requests = "^2.32.3" [tool.poetry.dev-dependencies] diff --git a/demo/requirements.txt b/demo/requirements.txt index 7bfc16bbfc..e6013b0a46 100644 --- a/demo/requirements.txt +++ b/demo/requirements.txt @@ -1,5 +1,5 @@ -asyncpg~=0.26.0 +asyncpg~=0.29.0 prompt_toolkit~=2.0.9 web.py~=0.62 -pygments~=2.10 -qrcode[pil]~=6.1 +pygments~=2.18 +qrcode[pil]~=7.4 diff --git a/demo/runners/faber.py b/demo/runners/faber.py index b2d1a1e98c..04f1a9200b 100644 --- a/demo/runners/faber.py +++ b/demo/runners/faber.py @@ -29,7 +29,6 @@ prompt_loop, ) - CRED_PREVIEW_TYPE = "https://didcomm.org/issue-credential/2.0/credential-preview" SELF_ATTESTED = os.getenv("SELF_ATTESTED") TAILS_FILE_COUNT = int(os.getenv("TAILS_FILE_COUNT", 100)) @@ -587,17 +586,28 @@ async def main(args): options += " (D) Set Endorser's DID\n" if faber_agent.multitenant: options += " (W) Create and/or Enable Wallet\n" + options += " (U) Upgrade wallet to anoncreds \n" options += " (T) Toggle tracing on credential/proof exchange\n" options += " (X) Exit?\n[1/2/3/4/{}{}T/X] ".format( "5/6/7/8/" if faber_agent.revocation else "", "W/" if faber_agent.multitenant else "", ) + + upgraded_to_anoncreds = False async for option in prompt_loop( options.replace("%CRED_TYPE%", faber_agent.cred_type) ): if option is not None: option = option.strip() + # Anoncreds has different endpoints for revocation + is_anoncreds = False + if ( + faber_agent.agent.__dict__["wallet_type"] == "askar-anoncreds" + or upgraded_to_anoncreds + ): + is_anoncreds = True + if option is None or option in "xX": break @@ -891,11 +901,6 @@ async def main(args): await prompt("Publish now? 
[Y/N]: ", default="N") ).strip() in "yY" - # Anoncreds has different endpoints for revocation - is_anoncreds = False - if faber_agent.agent.__dict__["wallet_type"] == "askar-anoncreds": - is_anoncreds = True - try: endpoint = ( "/anoncreds/revocation/revoke" @@ -997,6 +1002,14 @@ async def main(args): ) except ClientError: pass + elif option in "uU" and faber_agent.multitenant: + log_status("Upgrading wallet to anoncreds. Wait a couple seconds...") + await faber_agent.agent.admin_POST( + "/anoncreds/wallet/upgrade", + params={"wallet_name": faber_agent.agent.wallet_name}, + ) + upgraded_to_anoncreds = True + await asyncio.sleep(2.0) if faber_agent.show_timing: timing = await faber_agent.agent.fetch_timing() diff --git a/demo/runners/support/agent.py b/demo/runners/support/agent.py index 153be98abb..ae03cde81d 100644 --- a/demo/runners/support/agent.py +++ b/demo/runners/support/agent.py @@ -785,7 +785,7 @@ async def register_or_switch_wallet( "wallet_name": target_wallet_name, "wallet_type": self.wallet_type, "label": target_wallet_name, - "wallet_webhook_urls": self.webhook_url, + "wallet_webhook_urls": [self.webhook_url], "wallet_dispatch_type": "both", } self.wallet_name = target_wallet_name diff --git a/demo/runners/support/utils.py b/demo/runners/support/utils.py index 77e5d7792f..b69d24b42d 100644 --- a/demo/runners/support/utils.py +++ b/demo/runners/support/utils.py @@ -115,7 +115,7 @@ def output_reader(handle, callback, *args, **kwargs): break try: run_in_terminal(functools.partial(callback, line, *args)) - except AssertionError as e: + except AssertionError: # see comment in DemoAgent.handle_output # trace log and prompt_toolkit do not get along... pass @@ -245,19 +245,7 @@ def progress(*args, **kwargs): def check_requires(args): wtype = args.wallet_type or "askar" - if wtype == "indy": - try: - from indy.libindy import _cdll - - _cdll() - except ImportError: - print("python3-indy module not installed") - sys.exit(1) - except OSError: - print("libindy shared library could not be loaded") - sys.exit(1) - - elif wtype == "askar": + if wtype == "askar": try: from aries_askar.bindings import get_library diff --git a/docs/deploying/ContainerImagesAndGithubActions.md b/docs/deploying/ContainerImagesAndGithubActions.md index bdc28e19d8..3dbdf275fc 100644 --- a/docs/deploying/ContainerImagesAndGithubActions.md +++ b/docs/deploying/ContainerImagesAndGithubActions.md @@ -22,22 +22,18 @@ Multiple variants are available; see [Tags](#tags). ACA-Py is a foundation for building decentralized identity applications; to this end, there are multiple variants of ACA-Py built to suit the needs of a variety -of environments and workflows. There are currently two main variants: +of environments and workflows. The following variants exist: - "Standard" - The default configuration of ACA-Py, including: - Aries Askar for secure storage - Indy VDR for Indy ledger communication - Indy Shared Libraries for AnonCreds -- "Indy" - The legacy configuration of ACA-Py, including: - - Indy SDK Wallet for secure storage - - Indy SDK Ledger for Indy ledger communication - - Indy SDK for AnonCreds -These two image variants are largely distinguished by providers for Indy Network -and AnonCreds support. The Standard variant is recommended for new projects. -Migration from an Indy based image (whether the new Indy image variant or the -original BC Gov images) to the Standard image is outside of the scope of this -document. +In the past, two image variants were published. 
These two variants are largely +distinguished by providers for Indy Network and AnonCreds support. The Standard +variant is recommended for new projects. Migration from an Indy based image +(whether the new Indy image variant or the original BC Gov images) to the +Standard image is outside of the scope of this document. The ACA-Py images built by this project are tagged to indicate which of the above variants it is. Other tags may also be generated for use by developers. @@ -48,8 +44,6 @@ Tag | Variant | Example | Description ------------------------|----------|--------------------------|-------------------------------------------------------------------------------------------------| py3.9-X.Y.Z | Standard | py3.9-0.7.4 | Standard image variant built on Python 3.9 for ACA-Py version X.Y.Z | py3.10-X.Y.Z | Standard | py3.10-0.7.4 | Standard image variant built on Python 3.10 for ACA-Py version X.Y.Z | -py3.9-indy-A.B.C-X.Y.Z | Indy | py3.9-indy-1.16.0-0.7.4 | Standard image variant built on Python 3.9 for ACA-Py version X.Y.Z and Indy SDK Version A.B.C | -py3.10-indy-A.B.C-X.Y.Z | Indy | py3.10-indy-1.16.0-0.7.4 | Standard image variant built on Python 3.10 for ACA-Py version X.Y.Z and Indy SDK Version A.B.C | ### Image Comparison @@ -63,7 +57,7 @@ variants and between the BC Gov ACA-Py images. - Uses container's system python environment rather than `pyenv` - Askar and Indy Shared libraries are installed as dependencies of ACA-Py through pip from pre-compiled binaries included in the python wrappers - Built from repo contents -- Indy Image +- Indy Image (no longer produced but included here for clarity) - Based on slim variant of Debian - Built from multi-stage build step (`indy-base` in the Dockerfile) which includes Indy dependencies; this could be replaced with an explicit `indy-python` image from the Indy SDK repo - Includes `libindy` but does **NOT** include the Indy CLI @@ -86,21 +80,16 @@ variants and between the BC Gov ACA-Py images. - Tests (`.github/workflows/tests.yml`) - A reusable workflow that runs tests for the Standard ACA-Py variant for a given python version. -- Tests (Indy) (`.github/workflows/tests-indy.yml`) - A reusable workflow that - runs tests for the Indy ACA-Py variant for a given python and indy version. - PR Tests (`.github/workflows/pr-tests.yml`) - Run on pull requests; runs tests - for the Standard and Indy ACA-Py variants for a "default" python version. - Check this workflow for the current default python and Indy versions in use. + for the Standard ACA-Py variant for a "default" python version. + Check this workflow for the current default python version in use. - Nightly Tests (`.github/workflows/nightly-tests.yml`) - Run nightly; runs - tests for the Standard and Indy ACA-Py variants for all currently supported + tests for the Standard ACA-Py variant for all currently supported python versions. Check this workflow for the set of currently supported - versions and Indy version(s) in use. + versions in use. - Publish (`.github/workflows/publish.yml`) - Run on new release published or when manually triggered; builds and pushes the Standard ACA-Py variant to the Github Container Registry. -- Publish (Indy) (`.github/workflows/publish-indy.yml`) - Run on new release - published or when manually triggered; builds and pushes the Indy ACA-Py - variant to the Github Container Registry. - Integration Tests (`.github/workflows/integrationtests.yml`) - Run on pull requests (to the hyperledger fork only); runs BDD integration tests. 
- Black Format (`.github/workflows/blackformat.yml`) - Run on pull requests;
diff --git a/docs/design/UpgradeViaApi.md b/docs/design/UpgradeViaApi.md
new file mode 100644
index 0000000000..0ddb890449
--- /dev/null
+++ b/docs/design/UpgradeViaApi.md
@@ -0,0 +1,103 @@
+# Upgrade via API Design
+
+#### To isolate an upgrade process and trigger it via the API, the following pattern was designed to handle multitenant scenarios. It includes an is_upgrading record in the wallet (DB) and a middleware to prevent requests during the upgrade process.
+
+#### The diagram below describes the sequence of events for the anoncreds upgrade process for which it was designed, but the architecture can be used for any upgrade process.
+
+```mermaid
+sequenceDiagram
+    participant A1 as Agent 1
+    participant M1 as Middleware
+    participant IAS1 as IsAnoncredsSingleton Set
+    participant UIPS1 as UpgradeInProgressSingleton Set
+    participant W as Wallet (DB)
+    participant UIPS2 as UpgradeInProgressSingleton Set
+    participant IAS2 as IsAnoncredsSingleton Set
+    participant M2 as Middleware
+    participant A2 as Agent 2
+
+    Note over A1,A2: Start upgrade for non-anoncreds wallet
+    A1->>M1: POST /anoncreds/wallet/upgrade
+    M1-->>IAS1: check if wallet is in set
+    IAS1-->>M1: wallet is not in set
+    M1-->>UIPS1: check if wallet is in set
+    UIPS1-->>M1: wallet is not in set
+    M1->>A1: OK
+    A1-->>W: Add is_upgrading = anoncreds_in_progress record
+    A1->>A1: Upgrade wallet
+    A1-->>UIPS1: Add wallet to set
+
+    Note over A1,A2: Attempted Requests During Upgrade
+
+    Note over A1: Attempted Request
+    A1->>M1: GET /any-endpoint
+    M1-->>IAS1: check if wallet is in set
+    IAS1-->>M1: wallet is not in set
+    M1-->>UIPS1: check if wallet is in set
+    UIPS1-->>M1: wallet is in set
+    M1->>A1: 503 Service Unavailable
+
+    Note over A2: Attempted Request
+    A2->>M2: GET /any-endpoint
+    M2-->>IAS2: check if wallet is in set
+    IAS2-->>M2: wallet is not in set
+    M2-->>UIPS2: check if wallet is in set
+    UIPS2-->>M2: wallet is not in set
+    A2-->>W: Query is_upgrading = anoncreds_in_progress record
+    W-->>A2: record = anoncreds_in_progress
+    A2->>A2: Loop until upgrade is finished in separate process
+    A2-->>UIPS2: Add wallet to set
+    M2->>A2: 503 Service Unavailable
+
+    Note over A1,A2: Agent Restart During Upgrade
+    A1-->>W: Get is_upgrading record for wallet or all subwallets
+    W-->>A1: 
+    A1->>A1: Resume upgrade if in progress
+    A1-->>UIPS1: Add wallet to set
+
+    Note over A2: Same as Agent 1
+
+    Note over A1,A2: Upgrade Completes
+
+    Note over A1: Finish Upgrade
+    A1-->>W: set is_upgrading = anoncreds_finished
+    A1-->>UIPS1: Remove wallet from set
+    A1-->>IAS1: Add wallet to set
+    A1->>A1: update subwallet or restart
+
+    Note over A2: Detect Upgrade Complete
+    A2-->>W: Check is_upgrading = anoncreds_finished
+    W-->>A2: record = anoncreds_in_progress
+    A2->>A2: Wait 1 second
+    A2-->>W: Check is_upgrading = anoncreds_finished
+    W-->>A2: record = anoncreds_finished
+    A2-->>UIPS2: Remove wallet from set
+    A2-->>IAS2: Add wallet to set
+    A2->>A2: update subwallet or restart
+
+    Note over A1,A2: Restarted Agents After Upgrade
+
+    A1-->>W: Get is_upgrading record for wallet or all subwallets
+    W-->>A1: 
+    A1->>IAS1: Add wallet to set if record = anoncreds_finished
+
+    Note over A2: Same as Agent 1
+
+    Note over A1,A2: Attempted Requests After Upgrade
+
+    Note over A1: Attempted Request
+    A1->>M1: GET /any-endpoint
+    M1-->>IAS1: check if wallet is in set
+    IAS1-->>M1: wallet is in set
+    M1-->>A1: OK
+
+    Note over A2: Same as Agent 1
+```
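
To make the middleware half of this pattern concrete, here is a minimal sketch of how such a gate could look as an aiohttp middleware. It is illustrative only, not the actual ACA-Py implementation: the module-level sets and the way `wallet_name` is read from the request are stand-ins for the real singleton sets and profile lookup referenced below.

```python
from aiohttp import web

# Stand-ins for the real singleton sets in wallet/singletons.py
# (IsAnoncredsSingleton and UpgradeInProgressSingleton).
IS_ANONCREDS: set = set()
UPGRADE_IN_PROGRESS: set = set()


@web.middleware
async def upgrade_middleware(request: web.Request, handler):
    """Return 503 for a wallet whose upgrade is still in progress."""
    # Assumption: an earlier layer stored the wallet name on the request.
    wallet_name = request.get("wallet_name", "base")

    # Already-upgraded wallets pass straight through.
    if wallet_name in IS_ANONCREDS:
        return await handler(request)

    # A wallet mid-upgrade is blocked until the upgrade finishes.
    if wallet_name in UPGRADE_IN_PROGRESS:
        raise web.HTTPServiceUnavailable(reason="Wallet upgrade in progress")

    return await handler(request)
```

+##### An example of the implementation can be found via the anoncreds upgrade components.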
+ - `aries_cloudagent/wallet/routes.py` in the `upgrade_anoncreds` controller
+ - the upgrade code in `wallet/anoncreds_upgrade.py`
+ - the middleware in `admin/server.py` in the `upgrade_middleware` function
+ - the singleton sets in `wallet/singletons.py`
+ - the startup process in `core/conductor.py` in the `check_for_wallet_upgrades_in_progress` function
\ No newline at end of file
diff --git a/docs/features/AnoncredsControllerMigration.md b/docs/features/AnoncredsControllerMigration.md
new file mode 100644
index 0000000000..235bb797ee
--- /dev/null
+++ b/docs/features/AnoncredsControllerMigration.md
@@ -0,0 +1,675 @@
+# Anoncreds Controller Migration
+
+To upgrade an agent to use anoncreds, a controller should implement the required changes to endpoints and payloads in a way that is backwards compatible. The controller can then trigger the upgrade via the upgrade endpoint.
+
+## Step 1 - Endpoint Payload and Response Changes
+
+There are endpoint and payload changes involved in creating **schema, credential definition and revocation objects**. Your controller will need to implement these changes for any endpoints it uses.
+
+A good way to implement this with backwards compatibility is to get the wallet type via **/settings** and handle the existing endpoints when **wallet.type** is **askar** and the new anoncreds endpoints when **wallet.type** is **askar-anoncreds**. In this way the controller will handle both types of wallets in case the upgrade fails. After the upgrade is successful and stable, the controller can be updated to only handle the new anoncreds endpoints.
+
+## Schemas
+
+#### Creating a Schema:
+
+- Change endpoint from **POST /schemas** to **POST /anoncreds/schema**
+- Change payload and parameters from
+
+```
+params
+ - conn_id
+ - create_transaction_for_endorser
+```
+
+```json
+{
+  "attributes": ["score"],
+  "schema_name": "simple",
+  "schema_version": "1.0"
+}
+```
+
+to
+
+```json
+{
+  "options": {
+    "create_transaction_for_endorser": false,
+    "endorser_connection_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6"
+  },
+  "schema": {
+    "attrNames": ["score"],
+    "issuerId": "WgWxqztrNooG92RXvxSTWv",
+    "name": "Example schema",
+    "version": "1.0"
+  }
+}
+```
+
+- options are not required
+- **_issuerId_** is the public DID to be used on the ledger
+- The payload responses have changed
+
+**_Responses_**
+
+Without endorsement:
+
+```json
+{
+  "sent": {
+    "schema_id": "PzmGpSeCznzfPmv9B1EBqa:2:simple:1.0",
+    "schema": {
+      "ver": "1.0",
+      "id": "PzmGpSeCznzfPmv9B1EBqa:2:simple:1.0",
+      "name": "simple",
+      "version": "1.0",
+      "attrNames": ["score"],
+      "seqNo": 541
+    }
+  },
+  "schema_id": "PzmGpSeCznzfPmv9B1EBqa:2:simple:1.0",
+  "schema": {
+    "ver": "1.0",
+    "id": "PzmGpSeCznzfPmv9B1EBqa:2:simple:1.0",
+    "name": "simple",
+    "version": "1.0",
+    "attrNames": ["score"],
+    "seqNo": 541
+  }
+}
+```
+
+to
+
+```json
+{
+  "job_id": "string",
+  "registration_metadata": {},
+  "schema_metadata": {},
+  "schema_state": {
+    "schema": {
+      "attrNames": ["score"],
+      "issuerId": "WgWxqztrNooG92RXvxSTWv",
+      "name": "Example schema",
+      "version": "1.0"
+    },
+    "schema_id": "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0",
+    "state": "finished"
+  }
+}
+```
+
+With endorsement:
+
+```json
+{
+  "sent": {
+    "schema": {
+      "attrNames": [
+        "score"
+      ],
+      "id": "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0",
+      "name": "schema_name",
+      "seqNo": 10,
+      "ver": "1.0",
+      "version": "1.0"
+    },
+    "schema_id": "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0"
+  },
+  "txn": {...}
+}
+```
+
+to
+
+```json
+{
+  "job_id": "12cb896d648242c8b9b0fff3b870ed00",
+  "schema_state": {
+    "state": "wait",
+    "schema_id": "RbyPM1EP8fKCrf28YsC1qK:2:simple:1.1",
+    "schema": {
+      "issuerId": "RbyPM1EP8fKCrf28YsC1qK",
+      "attrNames": [
+        "score"
+      ],
+      "name": "simple",
+      "version": "1.1"
+    }
+  },
+  "registration_metadata": {
+    "txn": {...}
+  },
+  "schema_metadata": {}
+}
+```
+
+#### Getting schemas:
+
+- Change endpoint from **GET /schemas/created** to **GET /anoncreds/schemas**
+- Response payloads have no change
+
+#### Getting a schema:
+
+- Change endpoint from **GET /schemas/{schema_id}** to **GET /anoncreds/schema/{schema_id}**
+- Response payload changed from
+
+```json
+{
+  "schema": {
+    "attrNames": ["score"],
+    "id": "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0",
+    "name": "schema_name",
+    "seqNo": 10,
+    "ver": "1.0",
+    "version": "1.0"
+  }
+}
+```
+
+to
+
+```json
+{
+  "resolution_metadata": {},
+  "schema": {
+    "attrNames": ["score"],
+    "issuerId": "WgWxqztrNooG92RXvxSTWv",
+    "name": "Example schema",
+    "version": "1.0"
+  },
+  "schema_id": "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0",
+  "schema_metadata": {}
+}
+```
+
+## Credential Definitions
+
+#### Creating a credential definition:
+
+- Change endpoint from **POST /credential-definitions** to **POST /anoncreds/credential-definition**
+- Change payload and parameters from
+
+```
+params
+ - conn_id
+ - create_transaction_for_endorser
+```
+
+```json
+{
+  "revocation_registry_size": 1000,
+  "schema_id": "WgWxqztrNooG92RXvxSTWv:2:simple:1.0",
+  "support_revocation": true,
+  "tag": "default"
+}
+```
+
+to
+
+```json
+{
+  "credential_definition": {
+    "issuerId": "WgWxqztrNooG92RXvxSTWv",
+    "schemaId": "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0",
+    "tag": "default"
+  },
+  "options": {
+    "create_transaction_for_endorser": false,
+    "endorser_connection_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
+    "revocation_registry_size": 1000,
+    "support_revocation": true
+  }
+}
+```
+
+- options are not required, revocation will default to false
+- _**issuerId**_ is the public DID to be used on the ledger
+- _**schemaId**_ is the schema id on the ledger
+- The payload responses have changed
+
+**_Responses_**
+
+Without endorsement:
+
+```json
+{
+  "sent": {
+    "credential_definition_id": "CZGamdZoKhxiifjbdx3GHH:3:CL:558:default"
+  },
+  "credential_definition_id": "CZGamdZoKhxiifjbdx3GHH:3:CL:558:default"
+}
+```
+
+to
+
+```json
+{
+  "credential_definition_state": {
+    "state": "finished",
+    "credential_definition_id": "BpGaCdTwgEKoYWm6oPbnnj:3:CL:555:default",
+    "credential_definition": {
+      "issuerId": "BpGaCdTwgEKoYWm6oPbnnj",
+      "schemaId": "BpGaCdTwgEKoYWm6oPbnnj:2:simple:1.0",
+      "type": "CL",
+      "tag": "default",
+      "value": {
+        "primary": {...},
+        "revocation": {...}
+      }
+    }
+  },
+  "registration_metadata": {},
+  "credential_definition_metadata": {}
+}
+```
+
+With endorsement:
+
+```json
+{
+  "sent": {
+    "credential_definition_id": "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag"
+  },
+  "txn": {...}
+}
+```
+
+to
+
+```json
+{
+  "job_id": "7082e58aa71d4817bb32c3778596b012",
+  "credential_definition_state": {
+    "state": "wait",
+    "credential_definition_id": "RbyPM1EP8fKCrf28YsC1qK:3:CL:547:default",
+    "credential_definition": {
+      "issuerId": "RbyPM1EP8fKCrf28YsC1qK",
+      "schemaId": "RbyPM1EP8fKCrf28YsC1qK:2:simple:1.1",
+      "type": "CL",
+      "tag": "default",
+      "value": {
+        "primary": {...},
+        "revocation": {...}
+      }
+    }
+  },
+  "registration_metadata": {
+    "txn": {...}
+  },
+  "credential_definition_metadata": {}
+}
+```
+
+#### Getting credential definitions:
+
+- Change endpoint from **GET /credential-definitions/created** to **GET /anoncreds/credential-definitions**
+- Response payloads have no change
+
+#### Getting a credential definition:
+
+- Change endpoint from **GET /credential-definitions/{cred_def_id}** to **GET /anoncreds/credential-definition/{cred_def_id}**
+- Response payload changed from
+
+```json
+{
+  "credential_definition": {
+    "id": "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag",
+    "schemaId": "20",
+    "tag": "tag",
+    "type": "CL",
+    "value": {
+      "primary": {...},
+      "revocation": {...}
+    },
+    "ver": "1.0"
+  }
+}
+```
+
+to
+
+```json
+{
+  "credential_definition": {
+    "issuerId": "WgWxqztrNooG92RXvxSTWv",
+    "schemaId": "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0",
+    "tag": "default",
+    "type": "CL",
+    "value": {
+      "primary": {...},
+      "revocation": {...}
+    }
+  },
+  "credential_definition_id": "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag",
+  "credential_definitions_metadata": {},
+  "resolution_metadata": {}
+}
+```
+
+## Revocation
+
+Most of the changes with revocation endpoints only require prepending `/anoncreds` to the path. There are some other subtle changes listed below.
+
+#### Create and publish registry definition
+
+- The endpoints **POST /revocation/create-registry** and **POST /revocation/registry/{rev_reg_id}/definition** have been replaced by the single endpoint **POST /anoncreds/revocation-registry-definition**
+- Instead of creating the registry with **POST /revocation/create-registry** and payload
+
+```json
+{
+  "credential_definition_id": "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag",
+  "max_cred_num": 1000
+}
+```
+
+- And then publishing with **POST /revocation/registry/{rev_reg_id}/definition**
+
+```
+params
+ - conn_id
+ - create_transaction_for_endorser
+```
+
+- Use **POST /anoncreds/revocation-registry-definition** with payload
+
+```json
+{
+  "options": {
+    "create_transaction_for_endorser": false,
+    "endorser_connection_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6"
+  },
+  "revocation_registry_definition": {
+    "credDefId": "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0",
+    "issuerId": "WgWxqztrNooG92RXvxSTWv",
+    "maxCredNum": 777,
+    "tag": "default"
+  }
+}
+```
+
+- options are not required
+- _**issuerId**_ is the public DID to be used on the ledger
+- _**credDefId**_ is the cred def id on the ledger
+- The payload responses have changed
+
+**_Responses_**
+
+Without endorsement:
+
+```json
+{
+  "sent": {
+    "revocation_registry_id": "CZGamdZoKhxiifjbdx3GHH:4:CL:558:default"
+  },
+  "revocation_registry_id": "CZGamdZoKhxiifjbdx3GHH:4:CL:558:default"
+}
+```
+
+to
+
+```json
+{
+  "revocation_registry_definition_state": {
+    "state": "finished",
+    "revocation_registry_definition_id": "BpGaCdTwgEKoYWm6oPbnnj:4:BpGaCdTwgEKoYWm6oPbnnj:3:CL:555:default:CL_ACCUM:default",
+    "revocation_registry_definition": {
+      "issuerId": "BpGaCdTwgEKoYWm6oPbnnj",
+      "revocDefType": "CL_ACCUM",
+      "credDefId": "BpGaCdTwgEKoYWm6oPbnnj:3:CL:555:default",
+      "tag": "default",
+      "value": {...}
+    }
+  },
+  "registration_metadata": {},
+  "revocation_registry_definition_metadata": {
+    "seqNo": 569
+  }
+}
+```
+
+With endorsement:
+
+```json
+{
+  "sent": {
+    "result": {
+      "created_at": "2021-12-31T23:59:59Z",
+      "cred_def_id": "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag",
+      "error_msg": "Revocation registry undefined",
+      "issuer_did": "WgWxqztrNooG92RXvxSTWv",
+      "max_cred_num": 1000,
+      "pending_pub": [
+        "23"
+      ],
+      "record_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
+      "revoc_def_type": "CL_ACCUM",
+      "revoc_reg_def": {
+        "credDefId": "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag",
+        "id": "WgWxqztrNooG92RXvxSTWv:4:WgWxqztrNooG92RXvxSTWv:3:CL:20:tag:CL_ACCUM:0",
+        "revocDefType": "CL_ACCUM",
"string", + "value": {...}, + "ver": "1.0" + }, + "revoc_reg_entry": {...}, + "revoc_reg_id": "WgWxqztrNooG92RXvxSTWv:4:WgWxqztrNooG92RXvxSTWv:3:CL:20:tag:CL_ACCUM:0", + "state": "active", + "tag": "string", + "tails_hash": "H3C2AVvLMv6gmMNam3uVAjZpfkcJCwDwnZn6z3wXmqPV", + "tails_local_path": "string", + "tails_public_uri": "string", + "updated_at": "2021-12-31T23:59:59Z" + } + }, + "txn": {...} +} +``` + +to + +```json +{ + "job_id": "25dac53a1fb84cb8a5bf1b4362fbca11", + "revocation_registry_definition_state": { + "state": "wait", + "revocation_registry_definition_id": "RbyPM1EP8fKCrf28YsC1qK:4:RbyPM1EP8fKCrf28YsC1qK:3:CL:547:default:CL_ACCUM:default", + "revocation_registry_definition": { + "issuerId": "RbyPM1EP8fKCrf28YsC1qK", + "revocDefType": "CL_ACCUM", + "credDefId": "RbyPM1EP8fKCrf28YsC1qK:3:CL:547:default", + "tag": "default", + "value": {...} + } + }, + "registration_metadata": { + "txn": {...} + }, + "revocation_registry_definition_metadata": {} +} +``` + +#### Send revocation entry or list to ledger + +- Changes from **POST /revocation/registry/{rev_reg_id}/entry** to **POST /anoncreds/revocation-list** +- Change from + +``` +params + - conn_id + - create_transaction_for_endorser +``` + +to + +```json +{ + "options": { + "create_transaction_for_endorser": false, + "endorser_connection_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6" + }, + "rev_reg_def_id": "WgWxqztrNooG92RXvxSTWv:4:WgWxqztrNooG92RXvxSTWv:3:CL:20:tag:CL_ACCUM:0" +} +``` + +- options are not required +- _**rev_reg_def_id**_ is the revocation registry definition id on the ledger +- The payload responses have changed + +**_Responses_** + +Without endorsement: + +```json +{ + "sent": { + "revocation_registry_id": "BpGaCdTwgEKoYWm6oPbnnj:4:BpGaCdTwgEKoYWm6oPbnnj:3:CL:555:default:CL_ACCUM:default" + }, + "revocation_registry_id": "BpGaCdTwgEKoYWm6oPbnnj:4:BpGaCdTwgEKoYWm6oPbnnj:3:CL:555:default:CL_ACCUM:default" +} +``` + +to + +```json + +``` + +#### Get current active registry: + +- Change from **GET /revocation/active-registry/{cred_def_id}** to **GET /anoncreds/revocation/active-registry/{cred_def_id}** +- No payload changes + +#### Rotate active registry + +- Change from **POST /revocation/active-registry/{cred_def_id}/rotate** to **POST /anoncreds/revocation/active-registry/{cred_def_id}/rotate** +- No payload changes + +#### Get credential revocation status + +- Change from **GET /revocation/credential-record** to **GET /anoncreds/revocation/credential-record** +- No payload changes + +#### Publish revocations + +- Change from **POST /revocation/publish-revocations** to **POST /anoncreds/revocation/publish-revocations** +- Change payload and parameters from + +``` +params + - conn_id + - create_transaction_for_endorser +``` + +```json +{ + "rrid2crid": { + "additionalProp1": ["12345"], + "additionalProp2": ["12345"], + "additionalProp3": ["12345"] + } +} +``` + +to + +```json +{ + "options": { + "create_transaction_for_endorser": false, + "endorser_connection_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6" + }, + "rrid2crid": { + "additionalProp1": ["12345"], + "additionalProp2": ["12345"], + "additionalProp3": ["12345"] + } +} +``` + +- options are not required + +#### Get registries + +- Change from **GET /revocation/registries/created** to **GET /anoncreds/revocation/registries** +- No payload changes + +#### Get registry + +- Changes from **GET /revocation/registry/{rev_reg_id}** to **GET /anoncreds/revocation/registry/{rev_reg_id}** +- No payload changes + +#### Fix reocation state + +- Changes from **POST 
+- Changes from **POST /revocation/registry/{rev_reg_id}/fix-revocation-entry-state** to **POST /anoncreds/revocation/registry/{rev_reg_id}/fix-revocation-state**
+- No payload changes
+
+#### Get number of issued credentials
+
+- Changes from **GET /revocation/registry/{rev_reg_id}/issued** to **GET /anoncreds/revocation/registry/{rev_reg_id}/issued**
+- No payload changes
+
+#### Get credential details
+
+- Changes from **GET /revocation/registry/{rev_reg_id}/issued/details** to **GET /anoncreds/revocation/registry/{rev_reg_id}/issued/details**
+- No payload changes
+
+#### Get revoked credential details
+
+- Changes from **GET /revocation/registry/{rev_reg_id}/issued/indy_recs** to **GET /anoncreds/revocation/registry/{rev_reg_id}/issued/indy_recs**
+- No payload changes
+
+#### Set state manually
+
+- Changes from **PATCH /revocation/registry/{rev_reg_id}/set-state** to **PATCH /anoncreds/revocation/registry/{rev_reg_id}/set-state**
+- No payload changes
+
+#### Upload tails file
+
+- Changes from **PUT /revocation/registry/{rev_reg_id}/tails-file** to **PUT /anoncreds/registry/{rev_reg_id}/tails-file**
+- No payload changes
+
+#### Download tails file
+
+- Changes from **GET /revocation/registry/{rev_reg_id}/tails-file** to **GET /anoncreds/revocation/registry/{rev_reg_id}/tails-file**
+- No payload changes
+
+#### Revoke a credential
+
+- Changes from **POST /revocation/revoke** to **POST /anoncreds/revocation/revoke**
+- The payload and parameters change in the same pattern as **Publish revocations** above: the `conn_id` and `create_transaction_for_endorser` parameters move into an `options` object in the body
+
+#### Clear pending revocations
+
+- **POST /revocation/clear-pending-revocations** has been removed.
+
+#### Delete tails file
+
+- Endpoint **DELETE /revocation/delete-tails-server** has been removed
+
+#### Update tails file
+
+- Endpoint **PATCH /revocation/registry/{rev_reg_id}** has been removed
+
+#### Additional Endpoints
+
+- **PUT /anoncreds/registry/{rev_reg_id}/active** is available to set the active registry
+
+## Step 2 - Trigger the upgrade via the upgrade endpoint
+
+The upgrade endpoint is at **POST /anoncreds/wallet/upgrade**.
+
+You need to be careful doing this, as there is no way to downgrade the wallet. It is highly recommended to back up any wallets and to test the upgrade in a development environment before upgrading a production wallet.
+
+Params: `wallet_name` is the name of the wallet to upgrade. Used to prevent accidental upgrades.
+
+The behavior for a base wallet (standalone) or admin wallet in multitenant mode is slightly different from the behavior of a subwallet (or tenant) in multitenancy mode. However, the upgrade process is the same.
+
+1. Back up the wallet
+2. Scale down any controller instances on old endpoints
+3. Call the upgrade endpoint
+4. Scale up the controller instances to handle new endpoints
+
+### Base wallet (standalone) or admin wallet in multitenant mode:
+
+The agent will get a 503 error during the upgrade process. Any agent instance will shut down when the upgrade is complete, and it is up to the deployment environment to start the ACA-Py agent again. After the upgrade is complete, the old endpoints will no longer be available and will result in a 400 error.
+
+The ACA-Py agent will work after the restart. However, it will receive a warning for having the wrong wallet type configured. It is recommended to change the `wallet-type` to `askar-anoncreds` in the agent configuration file or start-up command.
+
+### Subwallet (tenant) in multitenancy mode:
+
+The sub-tenant which is in the process of being upgraded will get a 503 error during the upgrade process. All other sub-tenants will continue to operate normally. A controller can detect completion by polling, as shown in the sketch below.
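
As a rough illustration, a controller might trigger the upgrade and then poll until the agent stops answering 503. This is a minimal sketch, not production code: the admin URL and wallet name are assumed values, authorization headers for a sub-tenant are omitted, and it relies only on the **/settings** and **/anoncreds/wallet/upgrade** endpoints described above.

```python
import time

import requests

ADMIN_URL = "http://localhost:8031"  # assumption: the agent's admin API base URL
WALLET_NAME = "my-wallet"            # assumption: the wallet being upgraded


def upgrade_to_anoncreds() -> None:
    # Check the current wallet type first; skip if already upgraded.
    settings = requests.get(f"{ADMIN_URL}/settings").json()
    if settings.get("wallet.type") == "askar-anoncreds":
        return

    # Trigger the upgrade; wallet_name guards against accidental upgrades.
    requests.post(
        f"{ADMIN_URL}/anoncreds/wallet/upgrade",
        params={"wallet_name": WALLET_NAME},
    ).raise_for_status()

    # Poll until the agent reports the new wallet type. While the upgrade
    # runs the agent answers 503, and a base wallet may restart entirely.
    while True:
        time.sleep(1)
        try:
            resp = requests.get(f"{ADMIN_URL}/settings", timeout=5)
        except requests.ConnectionError:
            continue  # agent restarting after a base wallet upgrade
        if resp.ok and resp.json().get("wallet.type") == "askar-anoncreds":
            return
```

Once the poll succeeds, the controller can switch over to the `/anoncreds` endpoints described in Step 1.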
+After the upgrade is complete, the sub-tenant will be able to use the new endpoints. The old endpoints will no longer be available and will result in a 403 error. Any ACA-Py agents will remain running after the upgrade; it is not required that they restart.
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 33bcde9d50..87124ff7ab 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,4 +1,4 @@
 # Defining the exact versions for ReadTheDocs that will make sure things don't break
 sphinx==5.3.0
-sphinx_rtd_theme==1.1.1
+sphinx_rtd_theme==1.3.0
 readthedocs-sphinx-search==0.3.2
\ No newline at end of file
diff --git a/docs/testing/UnitTests.md b/docs/testing/UnitTests.md
index 786517f132..a167128a13 100644
--- a/docs/testing/UnitTests.md
+++ b/docs/testing/UnitTests.md
@@ -251,7 +251,7 @@ async def receive_invitation(
   function.`assert_called_once()`
 - pytest.mark setup in `setup.cfg`
-  can be attributed at function or class level. Example, `@pytest.mark.indy`
+  can be attributed at function or class level. Example, `@pytest.mark.askar`
 - Code coverage

 ![Code coverage screenshot](https://i.imgur.com/VhNYcje.png)
diff --git a/poetry.lock b/poetry.lock
index 6659de5cf9..9a829c3316 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,88 +1,88 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.

 [[package]]
 name = "aiohttp"
-version = "3.9.4"
+version = "3.9.5"
 description = "Async http client/server framework (asyncio)"
 optional = false
 python-versions = ">=3.8"
 files = [
- {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"},
- {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"},
- {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"},
- {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"},
- {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"},
- {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"},
- {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"},
- {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"},
- {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"},
- {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"},
- {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"},
- {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, - {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, - {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, - {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, - {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, - {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, - {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", 
hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, - {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, - {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, - {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, - {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, - {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, + {file = 
"aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, + {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, + {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, + {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, + {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, + {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, + {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, + {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = 
"sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, + {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, + {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, ] [package.dependencies] @@ -98,19 +98,24 @@ speedups = ["Brotli", "aiodns", "brotlicffi"] [[package]] name = "aiohttp-apispec" -version = "2.2.3" +version = "3.0.1" description = "Build and document REST APIs with aiohttp and apispec" optional = false -python-versions = ">=3.5" -files = [ - {file = "aiohttp-apispec-2.2.3.tar.gz", hash = "sha256:d70431e5f3ef5c6dc96dc9180ce10ddfd78fa054f178af8259707eb6d421ed05"}, -] +python-versions = ">=3.9" +files = [] +develop = false [package.dependencies] -aiohttp = ">=3.0.1,<4.0" -apispec = ">=3.0.0,<4.0" -jinja2 = "*" -webargs = "<6.0" +aiohttp = ">=3.9.4,<4.0" +apispec = ">=6.6.1,<6.7.0" +jinja2 = ">=3.1.3,<3.2.0" +webargs = ">=8.4.0,<8.5.0" + +[package.source] +type = "git" +url = "https://github.com/ff137/aiohttp-apispec.git" +reference = "v3.0.1" +resolved_reference = "a2b67c0d26f6d3bf0234a8cd134f699144239eeb" [[package]] name = "aiohttp-cors" @@ -151,6 +156,17 @@ files = [ {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + [[package]] name = "anoncreds" version = "0.2.0" @@ -166,21 +182,23 @@ files = [ [[package]] name = "apispec" -version = "3.3.2" +version = "6.6.1" description = "A pluggable API specification generator. Currently supports the OpenAPI Specification (f.k.a. the Swagger specification)." 
optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "apispec-3.3.2-py2.py3-none-any.whl", hash = "sha256:a1df9ec6b2cd0edf45039ef025abd7f0660808fa2edf737d3ba1cf5ef1a4625b"}, - {file = "apispec-3.3.2.tar.gz", hash = "sha256:d23ebd5b71e541e031b02a19db10b5e6d5ef8452c552833e3e1afc836b40b1ad"}, + {file = "apispec-6.6.1-py3-none-any.whl", hash = "sha256:6460315cb38ac6a2ff42d9e2b8dc0435c37d4428d3abeda96ff97b5dc8eb6b94"}, + {file = "apispec-6.6.1.tar.gz", hash = "sha256:f5caa47cee75fe03b9c50b5594048b4c052eeca2c212e0dac12dbb6175d9a659"}, ] +[package.dependencies] +packaging = ">=21.3" + [package.extras] -dev = ["PyYAML (>=3.10)", "flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "marshmallow (>=2.19.2)", "mock", "prance[osv] (>=0.11)", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] -docs = ["marshmallow (>=2.19.2)", "pyyaml (==5.3.1)", "sphinx (==3.2.1)", "sphinx-issues (==1.2.0)", "sphinx-rtd-theme (==0.5.0)"] -lint = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "pre-commit (>=2.4,<3.0)"] -tests = ["PyYAML (>=3.10)", "marshmallow (>=2.19.2)", "mock", "prance[osv] (>=0.11)", "pytest"] -validation = ["prance[osv] (>=0.11)"] +dev = ["apispec[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["apispec[marshmallow]", "pyyaml (==6.0.1)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-rtd-theme (==2.0.0)"] +marshmallow = ["marshmallow (>=3.18.0)"] +tests = ["apispec[marshmallow,yaml]", "openapi-spec-validator (==0.7.1)", "pytest"] yaml = ["PyYAML (>=3.10)"] [[package]] @@ -231,13 +249,13 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "babel" -version = "2.14.0" +version = "2.15.0" description = "Internationalization utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, - {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, + {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, + {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, ] [package.extras] @@ -259,33 +277,33 @@ tests = ["PyHamcrest (>=2.0.2)", "mypy", "pytest (>=4.6)", "pytest-benchmark", " [[package]] name = "black" -version = "24.3.0" +version = "24.4.2" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"}, - {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"}, - {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"}, - {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"}, - {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"}, - {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"}, - {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"}, - {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"}, - {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"}, - {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"}, - {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"}, - {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"}, - {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"}, - {file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"}, - {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"}, - {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"}, - {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"}, - {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"}, - {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"}, - {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"}, - {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"}, - {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"}, + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = 
"black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, ] [package.dependencies] @@ -327,13 +345,13 @@ files = [ [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.6.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, + {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, ] [[package]] @@ -552,63 +570,63 @@ yaml = ["PyYAML"] [[package]] name = "coverage" -version = "7.4.3" +version = "7.5.3" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, - {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, - {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, - {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, - {file = 
"coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, - {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, - {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, - {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, - {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, - {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, - {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, - {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, - {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, - {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, - {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, + {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, + {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, + {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, + {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, + {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, + {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, + {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, + {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, + {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, + {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, + {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, + {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, ] [package.dependencies] @@ -619,43 +637,43 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.5" +version = "42.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, - {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, - {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, - {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, - {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, - {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = 
"sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, - {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, + {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a987f840718078212fdf4504d0fd4c6effe34a7e4740378e59d47696e8dfb477"}, + {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd13b5e9b543532453de08bcdc3cc7cebec6f9883e886fd20a92f26940fd3e7a"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79165431551042cc9d1d90e6145d5d0d3ab0f2d66326c201d9b0e7f5bf43604"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a47787a5e3649008a1102d3df55424e86606c9bae6fb77ac59afe06d234605f8"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:02c0eee2d7133bdbbc5e24441258d5d2244beb31da5ed19fbb80315f4bbbff55"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e44507bf8d14b36b8389b226665d597bc0f18ea035d75b4e53c7b1ea84583cc"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7f8b25fa616d8b846aef64b15c606bb0828dbc35faf90566eb139aa9cff67af2"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93a3209f6bb2b33e725ed08ee0991b92976dfdcf4e8b38646540674fc7508e13"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e6b8f1881dac458c34778d0a424ae5769de30544fc678eac51c1c8bb2183e9da"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3de9a45d3b2b7d8088c3fbf1ed4395dfeff79d07842217b38df14ef09ce1d8d7"}, + {file = "cryptography-42.0.7-cp37-abi3-win32.whl", hash = "sha256:789caea816c6704f63f6241a519bfa347f72fbd67ba28d04636b7c6b7da94b0b"}, + {file = "cryptography-42.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:8cb8ce7c3347fcf9446f201dc30e2d5a3c898d009126010cbd1f443f28b52678"}, + {file = "cryptography-42.0.7-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:a3a5ac8b56fe37f3125e5b72b61dcde43283e5370827f5233893d461b7360cd4"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:779245e13b9a6638df14641d029add5dc17edbef6ec915688f3acb9e720a5858"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d563795db98b4cd57742a78a288cdbdc9daedac29f2239793071fe114f13785"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:31adb7d06fe4383226c3e963471f6837742889b3c4caa55aac20ad951bc8ffda"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:efd0bf5205240182e0f13bcaea41be4fdf5c22c5129fc7ced4a0282ac86998c9"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a9bc127cdc4ecf87a5ea22a2556cab6c7eda2923f84e4f3cc588e8470ce4e42e"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3577d029bc3f4827dd5bf8bf7710cac13527b470bbf1820a3f394adb38ed7d5f"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2e47577f9b18723fa294b0ea9a17d5e53a227867a0a4904a1a076d1646d45ca1"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1a58839984d9cb34c855197043eaae2c187d930ca6d644612843b4fe8513c886"}, + {file = "cryptography-42.0.7-cp39-abi3-win32.whl", hash = "sha256:e6b79d0adb01aae87e8a44c2b64bc3f3fe59515280e00fb6d57a7267a2583cda"}, + {file = "cryptography-42.0.7-cp39-abi3-win_amd64.whl", hash = "sha256:16268d46086bb8ad5bf0a2b5544d8a9ed87a0e33f5e77dd3c3301e63d941a83b"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2954fccea107026512b15afb4aa664a5640cd0af630e2ee3962f2602693f0c82"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:362e7197754c231797ec45ee081f3088a27a47c6c01eff2ac83f60f85a50fe60"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f698edacf9c9e0371112792558d2f705b5645076cc0aaae02f816a0171770fd"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5482e789294854c28237bba77c4c83be698be740e31a3ae5e879ee5444166582"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e9b2a6309f14c0497f348d08a065d52f3020656f675819fc405fb63bbcd26562"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d8e3098721b84392ee45af2dd554c947c32cc52f862b6a3ae982dbb90f577f14"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c65f96dad14f8528a447414125e1fc8feb2ad5a272b8f68477abbcc1ea7d94b9"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36017400817987670037fbb0324d71489b6ead6231c9604f8fc1f7d008087c68"}, + {file = "cryptography-42.0.7.tar.gz", hash = "sha256:ecbfbc00bf55888edda9868a4cf927205de8499e7fabe6c050322298382953f2"}, ] [package.dependencies] @@ -842,24 +860,24 @@ files = [ [[package]] name = "docutils" -version = "0.20.1" +version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = 
"sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] [[package]] name = "ecdsa" -version = "0.16.1" +version = "0.19.0" description = "ECDSA cryptographic signature library (pure python)" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.6" files = [ - {file = "ecdsa-0.16.1-py2.py3-none-any.whl", hash = "sha256:881fa5e12bb992972d3d1b3d4dfbe149ab76a89f13da02daa5ea1ec7dea6e747"}, - {file = "ecdsa-0.16.1.tar.gz", hash = "sha256:cfc046a2ddd425adbd1a78b3c46f0d1325c657811c0f45ecc3a0a6236c1e50ff"}, + {file = "ecdsa-0.19.0-py2.py3-none-any.whl", hash = "sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a"}, + {file = "ecdsa-0.19.0.tar.gz", hash = "sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8"}, ] [package.dependencies] @@ -889,13 +907,13 @@ test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "eth-typing" -version = "4.0.0" +version = "4.2.3" description = "eth-typing: Common type annotations for ethereum python packages" optional = false -python-versions = ">=3.8, <4" +python-versions = "<4,>=3.8" files = [ - {file = "eth-typing-4.0.0.tar.gz", hash = "sha256:9af0b6beafbc5c2e18daf19da5f5a68315023172c4e79d149e12ad10a3d3f731"}, - {file = "eth_typing-4.0.0-py3-none-any.whl", hash = "sha256:7e556bea322b6e8c0a231547b736c258e10ce9eed5ddc254f51031b12af66a16"}, + {file = "eth_typing-4.2.3-py3-none-any.whl", hash = "sha256:b2df49fa89d2e85f2cc3fb1c903b0cd183d524f7a045e3db8cc720cf41adcd3d"}, + {file = "eth_typing-4.2.3.tar.gz", hash = "sha256:8ee3ae7d4136d14fcb955c34f9dbef8e52170984d4dc68c0ab0d61621eab29d8"}, ] [package.extras] @@ -905,13 +923,13 @@ test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "eth-utils" -version = "4.0.0" +version = "4.1.1" description = "eth-utils: Common utility functions for python code that interacts with Ethereum" optional = false -python-versions = ">=3.8, <4" +python-versions = "<4,>=3.8" files = [ - {file = "eth-utils-4.0.0.tar.gz", hash = "sha256:58f9c57900e0f430be728a5e976dc6ed51f493a61e8a4ff1f73c043832cd4f2f"}, - {file = "eth_utils-4.0.0-py3-none-any.whl", hash = "sha256:38d0a5a4b5bb8f2e583f040ede678c47d9eae57a058a11895271a947853947a0"}, + {file = "eth_utils-4.1.1-py3-none-any.whl", hash = "sha256:ccbbac68a6d65cb6e294c5bcb6c6a5cec79a241c56dc5d9c345ed788c30f8534"}, + {file = "eth_utils-4.1.1.tar.gz", hash = "sha256:71c8d10dec7494aeed20fa7a4d52ec2ce4a2e52fdce80aab4f5c3c19f3648b25"}, ] [package.dependencies] @@ -921,19 +939,19 @@ eth-typing = ">=3.0.0" toolz = {version = ">0.8.2", markers = "implementation_name == \"pypy\""} [package.extras] -dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "eth-hash[pycryptodome]", "hypothesis (>=4.43.0)", "ipython", "mypy (==1.5.1)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] -docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] +dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "eth-hash[pycryptodome]", "hypothesis (>=4.43.0)", "ipython", "mypy (==1.5.1)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] 
test = ["hypothesis (>=4.43.0)", "mypy (==1.5.1)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, ] [package.extras] @@ -941,63 +959,60 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.13.1" +version = "3.14.0" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, + {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, + {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] name = "frozendict" -version = "2.4.0" +version = "2.4.4" description = "A simple immutable dictionary" optional = false python-versions = ">=3.6" files = [ - {file = "frozendict-2.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:475c65202a6f5421df8cacb8a2f29c5087134a0542b0540ae95fbf4db7af2ff9"}, - {file = "frozendict-2.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2607e82efdd2c277224a58bda3994d4cd48e49eff7fa31e404cf3066e8dbfeae"}, - {file = "frozendict-2.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fd4583194baabe100c135883017da76259a315d34e303eddf198541b7e02e44"}, - {file = "frozendict-2.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efca7281184b54f7abab6980cf25837b709f72ced62791f62dabcd7b184d958a"}, - {file = "frozendict-2.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fc4cba1ced988ce9020dfcaae6fe3f5521eebc00c5772b511aaf691b0be91e6"}, - {file = "frozendict-2.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8fab616e7c0fea2ac928f107c740bd9ba516fc083adfcd1c391d6bfc9164403d"}, - {file = "frozendict-2.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:09ba8ee37d260adde311b8eb4cd12bf27f64071242f736757ae6a11d331eb860"}, - {file = "frozendict-2.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:0615ed71570eec3cc96df063930ea6e563211efeeac86e3f3cc8bdfc9c9bfab7"}, - {file = 
"frozendict-2.4.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc754117a7d60ba8e55b3c39abd67f37fbc05dd63cdcb03d1717a382fe0a3421"}, - {file = "frozendict-2.4.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2804ea4bd2179bb33b99483cc8d69246630cc00632b9affe2914e8666f1cc7e5"}, - {file = "frozendict-2.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd4700c3f0aebdc8f4375c35590135794b1dbf2aca132f4756b584fa9910af2d"}, - {file = "frozendict-2.4.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:da4406d95c340e0b1cc43a3858fac729f52689325bcf61a9182eb94aff7451dc"}, - {file = "frozendict-2.4.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1875e7b70a5724bf964354da8fd542240d2cead0d80053ac96bf4494ce3517fa"}, - {file = "frozendict-2.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a60f353496637ca21396289a7d969af1eb4ec4d11a7c37a0e7f25fc1761a0c97"}, - {file = "frozendict-2.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b666f9c6c8a9e794d2713a944b10a65480ff459579d75b5f686c75031c2c2dfc"}, - {file = "frozendict-2.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d81fb396ea81fcba3b3dde4a4b51adcb74ff31632014fbfd030f8acd5a7292"}, - {file = "frozendict-2.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4925c8e82d2bd23d45996cd0827668a52b9c51103897c98ce409a763d0c00c61"}, - {file = "frozendict-2.4.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aa86325da6a6071284b4ed3d9d2cd9db068560aebad503b658d6a889a0575683"}, - {file = "frozendict-2.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5bb5b62d4e2bce12e91800496d94de41bec8f16e4d8a7b16e8f263676ae2031a"}, - {file = "frozendict-2.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3909df909516cfd7bcefd9a3003948970a12a50c5648d8bbddafcef171f2117f"}, - {file = "frozendict-2.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:204f2c5c10fc018d1ba8ccc67758aa83fe769c782547bd26dc250317a7ccba71"}, - {file = "frozendict-2.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8d1d269874c94b1ed2b6667e5e43dcf4541838019b1caa4c48f848ac73634df"}, - {file = "frozendict-2.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:809f1cffb602cf06e5186c69c0e3b74bec7a3684593145331f9aa2a65b5ba3b7"}, - {file = "frozendict-2.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b017cba5f73869b04c2977139ad08e57a7480de1e384c34193939698119baa1d"}, - {file = "frozendict-2.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0b75e5e231621dedaef88334997e79fbd137dd89895543d3862fe0220fc3572c"}, - {file = "frozendict-2.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:df3819a5d48ab3aae1548e62093d0111ad7c3b62ff9392421b7bbf149c08b629"}, - {file = "frozendict-2.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:42a9b33ccf9d417b22146e59803c53d5c39d7d9151d2df8df59c235f6a1a5ed7"}, - {file = "frozendict-2.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3f51bfa64e0c4a6608e3f2878bab1211a6b3b197de6fa57151bbe73f1184457"}, - {file = "frozendict-2.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a1d232f092dc686e6ef23d436bde30f82c018f31cef1b89b31caef03814b1617"}, - {file = "frozendict-2.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e530658134e88607ff8c2c8934a07b2bb5e9fffab5045f127746f6542c6c77e"}, - {file = "frozendict-2.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23a52bbea30c9e35b89291273944393770fb031e522a172e3aff19b62cc50047"}, - {file 
= "frozendict-2.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f91acaff475d0ef0d3436b805c9b91fc627a6a8a281771a24f7ab7f458a0b34f"}, - {file = "frozendict-2.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:08d9c7c1aa92b94538b3a79c43999f999012e174588435f197794d5e5a80e0f5"}, - {file = "frozendict-2.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:05c5a77957ecba4286c7ab33861a8f4f2badc7ea86fc82b834fb360d3aa4c108"}, - {file = "frozendict-2.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:c8af8a6a39e0050d3f3193cda56c42b43534a9b3995c44241bb9527e3c3fd451"}, - {file = "frozendict-2.4.0.tar.gz", hash = "sha256:c26758198e403337933a92b01f417a8240c954f553e1d4b5e0f8e39d9c8e3f0a"}, + {file = "frozendict-2.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4a59578d47b3949437519b5c39a016a6116b9e787bb19289e333faae81462e59"}, + {file = "frozendict-2.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12a342e439aef28ccec533f0253ea53d75fe9102bd6ea928ff530e76eac38906"}, + {file = "frozendict-2.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f79c26dff10ce11dad3b3627c89bb2e87b9dd5958c2b24325f16a23019b8b94"}, + {file = "frozendict-2.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2bd009cf4fc47972838a91e9b83654dc9a095dc4f2bb3a37c3f3124c8a364543"}, + {file = "frozendict-2.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:87ebcde21565a14fe039672c25550060d6f6d88cf1f339beac094c3b10004eb0"}, + {file = "frozendict-2.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:fefeb700bc7eb8b4c2dc48704e4221860d254c8989fb53488540bc44e44a1ac2"}, + {file = "frozendict-2.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:4297d694eb600efa429769125a6f910ec02b85606f22f178bafbee309e7d3ec7"}, + {file = "frozendict-2.4.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:812ab17522ba13637826e65454115a914c2da538356e85f43ecea069813e4b33"}, + {file = "frozendict-2.4.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fee9420475bb6ff357000092aa9990c2f6182b2bab15764330f4ad7de2eae49"}, + {file = "frozendict-2.4.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3148062675536724502c6344d7c485dd4667fdf7980ca9bd05e338ccc0c4471e"}, + {file = "frozendict-2.4.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:78c94991944dd33c5376f720228e5b252ee67faf3bac50ef381adc9e51e90d9d"}, + {file = "frozendict-2.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:1697793b5f62b416c0fc1d94638ec91ed3aa4ab277f6affa3a95216ecb3af170"}, + {file = "frozendict-2.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:199a4d32194f3afed6258de7e317054155bc9519252b568d9cfffde7e4d834e5"}, + {file = "frozendict-2.4.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85375ec6e979e6373bffb4f54576a68bf7497c350861d20686ccae38aab69c0a"}, + {file = "frozendict-2.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2d8536e068d6bf281f23fa835ac07747fb0f8851879dd189e9709f9567408b4d"}, + {file = "frozendict-2.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:259528ba6b56fa051bc996f1c4d8b57e30d6dd3bc2f27441891b04babc4b5e73"}, + {file = "frozendict-2.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:07c3a5dee8bbb84cba770e273cdbf2c87c8e035903af8f781292d72583416801"}, + {file = "frozendict-2.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6874fec816b37b6eb5795b00e0574cba261bf59723e2de607a195d5edaff0786"}, + {file = "frozendict-2.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:c8f92425686323a950337da4b75b4c17a3327b831df8c881df24038d560640d4"}, + {file = "frozendict-2.4.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d58d9a8d9e49662c6dafbea5e641f97decdb3d6ccd76e55e79818415362ba25"}, + {file = "frozendict-2.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:93a7b19afb429cbf99d56faf436b45ef2fa8fe9aca89c49eb1610c3bd85f1760"}, + {file = "frozendict-2.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b70b431e3a72d410a2cdf1497b3aba2f553635e0c0f657ce311d841bf8273b6"}, + {file = "frozendict-2.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:e1b941132d79ce72d562a13341d38fc217bc1ee24d8c35a20d754e79ff99e038"}, + {file = "frozendict-2.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc2228874eacae390e63fd4f2bb513b3144066a977dc192163c9f6c7f6de6474"}, + {file = "frozendict-2.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63aa49f1919af7d45fb8fd5dec4c0859bc09f46880bd6297c79bb2db2969b63d"}, + {file = "frozendict-2.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6bf9260018d653f3cab9bd147bd8592bf98a5c6e338be0491ced3c196c034a3"}, + {file = "frozendict-2.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6eb716e6a6d693c03b1d53280a1947716129f5ef9bcdd061db5c17dea44b80fe"}, + {file = "frozendict-2.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d13b4310db337f4d2103867c5a05090b22bc4d50ca842093779ef541ea9c9eea"}, + {file = "frozendict-2.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:b3b967d5065872e27b06f785a80c0ed0a45d1f7c9b85223da05358e734d858ca"}, + {file = "frozendict-2.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:4ae8d05c8d0b6134bfb6bfb369d5fa0c4df21eabb5ca7f645af95fdc6689678e"}, + {file = "frozendict-2.4.4-py311-none-any.whl", hash = "sha256:705efca8d74d3facbb6ace80ab3afdd28eb8a237bfb4063ed89996b024bc443d"}, + {file = "frozendict-2.4.4-py312-none-any.whl", hash = "sha256:d9647563e76adb05b7cde2172403123380871360a114f546b4ae1704510801e5"}, + {file = "frozendict-2.4.4.tar.gz", hash = "sha256:3f7c031b26e4ee6a3f786ceb5e3abf1181c4ade92dce1f847da26ea2c96008c7"}, ] [[package]] @@ -1088,13 +1103,13 @@ files = [ [[package]] name = "identify" -version = "2.5.35" +version = "2.5.36" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, - {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, + {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, + {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, ] [package.extras] @@ -1124,22 +1139,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.1" +version = "7.1.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", hash = 
"sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "indy-credx" @@ -1156,15 +1171,15 @@ files = [ [[package]] name = "indy-vdr" -version = "0.4.1" +version = "0.4.2" description = "" optional = true python-versions = ">=3.6.3" files = [ - {file = "indy_vdr-0.4.1-py3-none-macosx_10_9_universal2.whl", hash = "sha256:62d18b411e6c2ca7204299306a08cf65a43dec0982f1d9749d44f02f4817ff0a"}, - {file = "indy_vdr-0.4.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:826c128170a42ccb06d24c9018876cdfdc94fc6870661b21d47ffa022a1570bb"}, - {file = "indy_vdr-0.4.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6cbcd760059c8410c09948f8f64dbf8a80a47fe8072d7d865fcfffe7e867266b"}, - {file = "indy_vdr-0.4.1-py3-none-win_amd64.whl", hash = "sha256:c67de1c4498806ad5afaec104ca4edc2b4c7bb59e02d588c3bfd85164e017158"}, + {file = "indy_vdr-0.4.2-py3-none-macosx_10_9_universal2.whl", hash = "sha256:21e4cc22bdb1de581e4abe00e2201d970f46e05d2420437fe023052614867553"}, + {file = "indy_vdr-0.4.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9dc8e16e8a0c4666c1a9f0a3e9967cb3dace92975b8dbb9b0aa2c7785ac5e12b"}, + {file = "indy_vdr-0.4.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:b1390ee6cbf47967c565b16b7b672969ee54485dd16963ecdd451dc128aff7c1"}, + {file = "indy_vdr-0.4.2-py3-none-win_amd64.whl", hash = "sha256:abb70e9dc46d59a6be1ac1a9b3530732c5dc8afe67f5aacba20bc7404c7d3317"}, ] [[package]] @@ -1191,13 +1206,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.3" +version = "3.1.4" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] @@ -1237,106 +1252,171 @@ typing-extensions = ">=4.5.0" [[package]] name = "lxml" -version = "5.1.0" +version = "5.2.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = false python-versions = ">=3.6" files = [ - {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:704f5572ff473a5f897745abebc6df40f22d4133c1e0a1f124e4f2bd3330ff7e"}, - {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d3c0f8567ffe7502d969c2c1b809892dc793b5d0665f602aad19895f8d508da"}, - {file = "lxml-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5fcfbebdb0c5d8d18b84118842f31965d59ee3e66996ac842e21f957eb76138c"}, - {file = "lxml-5.1.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f37c6d7106a9d6f0708d4e164b707037b7380fcd0b04c5bd9cae1fb46a856fb"}, - {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2befa20a13f1a75c751f47e00929fb3433d67eb9923c2c0b364de449121f447c"}, - {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22b7ee4c35f374e2c20337a95502057964d7e35b996b1c667b5c65c567d2252a"}, - {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf8443781533b8d37b295016a4b53c1494fa9a03573c09ca5104550c138d5c05"}, - {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82bddf0e72cb2af3cbba7cec1d2fd11fda0de6be8f4492223d4a268713ef2147"}, - {file = "lxml-5.1.0-cp310-cp310-win32.whl", hash = "sha256:b66aa6357b265670bb574f050ffceefb98549c721cf28351b748be1ef9577d93"}, - {file = "lxml-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4946e7f59b7b6a9e27bef34422f645e9a368cb2be11bf1ef3cafc39a1f6ba68d"}, - {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:14deca1460b4b0f6b01f1ddc9557704e8b365f55c63070463f6c18619ebf964f"}, - {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed8c3d2cd329bf779b7ed38db176738f3f8be637bb395ce9629fc76f78afe3d4"}, - {file = "lxml-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:436a943c2900bb98123b06437cdd30580a61340fbdb7b28aaf345a459c19046a"}, - {file = "lxml-5.1.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acb6b2f96f60f70e7f34efe0c3ea34ca63f19ca63ce90019c6cbca6b676e81fa"}, - {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af8920ce4a55ff41167ddbc20077f5698c2e710ad3353d32a07d3264f3a2021e"}, - {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cfced4a069003d8913408e10ca8ed092c49a7f6cefee9bb74b6b3e860683b45"}, - {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9e5ac3437746189a9b4121db2a7b86056ac8786b12e88838696899328fc44bb2"}, - {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4c9bda132ad108b387c33fabfea47866af87f4ea6ffb79418004f0521e63204"}, - {file = "lxml-5.1.0-cp311-cp311-win32.whl", hash = "sha256:bc64d1b1dab08f679fb89c368f4c05693f58a9faf744c4d390d7ed1d8223869b"}, - {file = "lxml-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5ab722ae5a873d8dcee1f5f45ddd93c34210aed44ff2dc643b5025981908cda"}, - {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9aa543980ab1fbf1720969af1d99095a548ea42e00361e727c58a40832439114"}, - {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6f11b77ec0979f7e4dc5ae081325a2946f1fe424148d3945f943ceaede98adb8"}, - {file = "lxml-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a36c506e5f8aeb40680491d39ed94670487ce6614b9d27cabe45d94cd5d63e1e"}, - {file = 
"lxml-5.1.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f643ffd2669ffd4b5a3e9b41c909b72b2a1d5e4915da90a77e119b8d48ce867a"}, - {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16dd953fb719f0ffc5bc067428fc9e88f599e15723a85618c45847c96f11f431"}, - {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16018f7099245157564d7148165132c70adb272fb5a17c048ba70d9cc542a1a1"}, - {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82cd34f1081ae4ea2ede3d52f71b7be313756e99b4b5f829f89b12da552d3aa3"}, - {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19a1bc898ae9f06bccb7c3e1dfd73897ecbbd2c96afe9095a6026016e5ca97b8"}, - {file = "lxml-5.1.0-cp312-cp312-win32.whl", hash = "sha256:13521a321a25c641b9ea127ef478b580b5ec82aa2e9fc076c86169d161798b01"}, - {file = "lxml-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:1ad17c20e3666c035db502c78b86e58ff6b5991906e55bdbef94977700c72623"}, - {file = "lxml-5.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:24ef5a4631c0b6cceaf2dbca21687e29725b7c4e171f33a8f8ce23c12558ded1"}, - {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d2900b7f5318bc7ad8631d3d40190b95ef2aa8cc59473b73b294e4a55e9f30f"}, - {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:601f4a75797d7a770daed8b42b97cd1bb1ba18bd51a9382077a6a247a12aa38d"}, - {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4b68c961b5cc402cbd99cca5eb2547e46ce77260eb705f4d117fd9c3f932b95"}, - {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:afd825e30f8d1f521713a5669b63657bcfe5980a916c95855060048b88e1adb7"}, - {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:262bc5f512a66b527d026518507e78c2f9c2bd9eb5c8aeeb9f0eb43fcb69dc67"}, - {file = "lxml-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:e856c1c7255c739434489ec9c8aa9cdf5179785d10ff20add308b5d673bed5cd"}, - {file = "lxml-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c7257171bb8d4432fe9d6fdde4d55fdbe663a63636a17f7f9aaba9bcb3153ad7"}, - {file = "lxml-5.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9e240ae0ba96477682aa87899d94ddec1cc7926f9df29b1dd57b39e797d5ab5"}, - {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a96f02ba1bcd330807fc060ed91d1f7a20853da6dd449e5da4b09bfcc08fdcf5"}, - {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3898ae2b58eeafedfe99e542a17859017d72d7f6a63de0f04f99c2cb125936"}, - {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61c5a7edbd7c695e54fca029ceb351fc45cd8860119a0f83e48be44e1c464862"}, - {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3aeca824b38ca78d9ee2ab82bd9883083d0492d9d17df065ba3b94e88e4d7ee6"}, - {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8f52fe6859b9db71ee609b0c0a70fea5f1e71c3462ecf144ca800d3f434f0764"}, - {file = "lxml-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:d42e3a3fc18acc88b838efded0e6ec3edf3e328a58c68fbd36a7263a874906c8"}, - {file = "lxml-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:eac68f96539b32fce2c9b47eb7c25bb2582bdaf1bbb360d25f564ee9e04c542b"}, - {file = 
"lxml-5.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ae15347a88cf8af0949a9872b57a320d2605ae069bcdf047677318bc0bba45b1"}, - {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c26aab6ea9c54d3bed716b8851c8bfc40cb249b8e9880e250d1eddde9f709bf5"}, - {file = "lxml-5.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342e95bddec3a698ac24378d61996b3ee5ba9acfeb253986002ac53c9a5f6f84"}, - {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725e171e0b99a66ec8605ac77fa12239dbe061482ac854d25720e2294652eeaa"}, - {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d184e0d5c918cff04cdde9dbdf9600e960161d773666958c9d7b565ccc60c45"}, - {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:98f3f020a2b736566c707c8e034945c02aa94e124c24f77ca097c446f81b01f1"}, - {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d48fc57e7c1e3df57be5ae8614bab6d4e7b60f65c5457915c26892c41afc59e"}, - {file = "lxml-5.1.0-cp38-cp38-win32.whl", hash = "sha256:7ec465e6549ed97e9f1e5ed51c657c9ede767bc1c11552f7f4d022c4df4a977a"}, - {file = "lxml-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b21b4031b53d25b0858d4e124f2f9131ffc1530431c6d1321805c90da78388d1"}, - {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:52427a7eadc98f9e62cb1368a5079ae826f94f05755d2d567d93ee1bc3ceb354"}, - {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6a2a2c724d97c1eb8cf966b16ca2915566a4904b9aad2ed9a09c748ffe14f969"}, - {file = "lxml-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843b9c835580d52828d8f69ea4302537337a21e6b4f1ec711a52241ba4a824f3"}, - {file = "lxml-5.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b99f564659cfa704a2dd82d0684207b1aadf7d02d33e54845f9fc78e06b7581"}, - {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8b0c78e7aac24979ef09b7f50da871c2de2def043d468c4b41f512d831e912"}, - {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bcf86dfc8ff3e992fed847c077bd875d9e0ba2fa25d859c3a0f0f76f07f0c8d"}, - {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:49a9b4af45e8b925e1cd6f3b15bbba2c81e7dba6dce170c677c9cda547411e14"}, - {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:280f3edf15c2a967d923bcfb1f8f15337ad36f93525828b40a0f9d6c2ad24890"}, - {file = "lxml-5.1.0-cp39-cp39-win32.whl", hash = "sha256:ed7326563024b6e91fef6b6c7a1a2ff0a71b97793ac33dbbcf38f6005e51ff6e"}, - {file = "lxml-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8d7b4beebb178e9183138f552238f7e6613162a42164233e2bda00cb3afac58f"}, - {file = "lxml-5.1.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9bd0ae7cc2b85320abd5e0abad5ccee5564ed5f0cc90245d2f9a8ef330a8deae"}, - {file = "lxml-5.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c1d679df4361408b628f42b26a5d62bd3e9ba7f0c0e7969f925021554755aa"}, - {file = "lxml-5.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2ad3a8ce9e8a767131061a22cd28fdffa3cd2dc193f399ff7b81777f3520e372"}, - {file = "lxml-5.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:304128394c9c22b6569eba2a6d98392b56fbdfbad58f83ea702530be80d0f9df"}, - {file = "lxml-5.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d74fcaf87132ffc0447b3c685a9f862ffb5b43e70ea6beec2fb8057d5d2a1fea"}, - {file = "lxml-5.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8cf5877f7ed384dabfdcc37922c3191bf27e55b498fecece9fd5c2c7aaa34c33"}, - {file = "lxml-5.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:877efb968c3d7eb2dad540b6cabf2f1d3c0fbf4b2d309a3c141f79c7e0061324"}, - {file = "lxml-5.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f14a4fb1c1c402a22e6a341a24c1341b4a3def81b41cd354386dcb795f83897"}, - {file = "lxml-5.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:25663d6e99659544ee8fe1b89b1a8c0aaa5e34b103fab124b17fa958c4a324a6"}, - {file = "lxml-5.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8b9f19df998761babaa7f09e6bc169294eefafd6149aaa272081cbddc7ba4ca3"}, - {file = "lxml-5.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e53d7e6a98b64fe54775d23a7c669763451340c3d44ad5e3a3b48a1efbdc96f"}, - {file = "lxml-5.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c3cd1fc1dc7c376c54440aeaaa0dcc803d2126732ff5c6b68ccd619f2e64be4f"}, - {file = "lxml-5.1.0.tar.gz", hash = "sha256:3eea6ed6e6c918e468e693c41ef07f3c3acc310b70ddd9cc72d9ef84bc9564ca"}, + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, + 
{file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, + {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, + {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, + {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, + {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, + {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, + {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", 
hash = "sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, + {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, + {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, + {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, + {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, + {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, + {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, + {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, + {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, + {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, + {file = 
"lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, + {file = "lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, + {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, + {file = "lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, ] [package.extras] cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.7)"] +source = ["Cython (>=3.0.10)"] [[package]] name = "markdown" -version = "3.5.2" +version = "3.6" description = "Python implementation of John Gruber's Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, - {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, ] [package.dependencies] @@ -1444,22 +1524,6 @@ docs = ["alabaster (==0.7.15)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "s lint = ["pre-commit (>=2.4,<4.0)"] tests = ["pytest", "pytz", "simplejson"] -[[package]] -name = "mock" -version = "4.0.3" -description = "Rolling backport of unittest.mock for all Pythons" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mock-4.0.3-py3-none-any.whl", hash = "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62"}, - {file = "mock-4.0.3.tar.gz", hash = "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"}, -] - -[package.extras] -build = ["blurb", "twine", "wheel"] -docs = ["sphinx"] -test = ["pytest (<5.4)", "pytest-cov"] - [[package]] name = "multidict" version = "6.0.5" @@ -1572,29 +1636,26 @@ files = [ [[package]] name = "nest-asyncio" -version = "1.5.9" +version = "1.6.0" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.9-py3-none-any.whl", hash = "sha256:61ec07ef052e72e3de22045b81b2cc7d71fceb04c568ba0b2e4b2f9f5231bec2"}, - {file = "nest_asyncio-1.5.9.tar.gz", hash = "sha256:d1e1144e9c6e3e6392e0fcf5211cb1c8374b5648a98f1ebe48e5336006b41907"}, + {file = 
"nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, ] [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.0" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.0-py2.py3-none-any.whl", hash = "sha256:508ecec98f9f3330b636d4448c0f1a56fc68017c68f1e7857ebc52acf0eb879a"}, + {file = "nodeenv-1.9.0.tar.gz", hash = "sha256:07f144e90dae547bf0d4ee8da0ee42664a42a04e02ed68e06324348dafe4bdb1"}, ] -[package.dependencies] -setuptools = "*" - [[package]] name = "packaging" version = "23.1" @@ -1705,28 +1766,29 @@ xmp = ["defusedxml"] [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -1746,13 +1808,13 @@ files = [ [[package]] name = "portalocker" -version = "2.7.0" +version = "2.8.2" description = "Wraps the portalocker recipe for easy usage" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "portalocker-2.7.0-py2.py3-none-any.whl", hash = "sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983"}, - {file = "portalocker-2.7.0.tar.gz", hash = "sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51"}, + {file = 
"portalocker-2.8.2-py3-none-any.whl", hash = "sha256:cfb86acc09b9aa7c3b43594e19be1345b9d16af3feb08bf92f23d4dce513a28e"}, + {file = "portalocker-2.8.2.tar.gz", hash = "sha256:2b035aa7828e46c58e9b31390ee1f169b98e1066ab10b9a6a861fe7e25ee4f33"}, ] [package.dependencies] @@ -1761,17 +1823,17 @@ pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} [package.extras] docs = ["sphinx (>=1.7.1)"] redis = ["redis"] -tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] [[package]] name = "pre-commit" -version = "3.3.3" +version = "3.7.1" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"}, - {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"}, + {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, + {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, ] [package.dependencies] @@ -1833,66 +1895,124 @@ files = [ [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] name = "pydantic" -version = "1.10.14" -description = "Data validation and settings management using python type hints" +version = "2.7.2" +description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", 
hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = 
"pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic-2.7.2-py3-none-any.whl", hash = "sha256:834ab954175f94e6e68258537dc49402c4a5e9d0409b9f1b86b7e934a8372de7"}, + {file = "pydantic-2.7.2.tar.gz", hash = "sha256:71b2945998f9c9b7919a45bde9a50397b289937d215ae141c1d0903ba7149fd7"}, ] [package.dependencies] -typing-extensions = ">=4.2.0" +annotated-types = ">=0.4.0" +pydantic-core = "2.18.3" +typing-extensions = ">=4.6.1" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.18.3" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.18.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:744697428fcdec6be5670460b578161d1ffe34743a5c15656be7ea82b008197c"}, + {file = "pydantic_core-2.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37b40c05ced1ba4218b14986fe6f283d22e1ae2ff4c8e28881a70fb81fbfcda7"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a9a75622357076efb6b311983ff190fbfb3c12fc3a853122b34d3d358126c"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2e253af04ceaebde8eb201eb3f3e3e7e390f2d275a88300d6a1959d710539e2"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:855ec66589c68aa367d989da5c4755bb74ee92ccad4fdb6af942c3612c067e34"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d3e42bb54e7e9d72c13ce112e02eb1b3b55681ee948d748842171201a03a98a"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6ac9ffccc9d2e69d9fba841441d4259cb668ac180e51b30d3632cd7abca2b9b"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c56eca1686539fa0c9bda992e7bd6a37583f20083c37590413381acfc5f192d6"}, + {file = "pydantic_core-2.18.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:17954d784bf8abfc0ec2a633108207ebc4fa2df1a0e4c0c3ccbaa9bb01d2c426"}, + {file = "pydantic_core-2.18.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:98ed737567d8f2ecd54f7c8d4f8572ca7c7921ede93a2e52939416170d357812"}, + {file = "pydantic_core-2.18.3-cp310-none-win32.whl", hash = 
"sha256:9f9e04afebd3ed8c15d67a564ed0a34b54e52136c6d40d14c5547b238390e779"}, + {file = "pydantic_core-2.18.3-cp310-none-win_amd64.whl", hash = "sha256:45e4ffbae34f7ae30d0047697e724e534a7ec0a82ef9994b7913a412c21462a0"}, + {file = "pydantic_core-2.18.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b9ebe8231726c49518b16b237b9fe0d7d361dd221302af511a83d4ada01183ab"}, + {file = "pydantic_core-2.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b8e20e15d18bf7dbb453be78a2d858f946f5cdf06c5072453dace00ab652e2b2"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0d9ff283cd3459fa0bf9b0256a2b6f01ac1ff9ffb034e24457b9035f75587cb"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f7ef5f0ebb77ba24c9970da18b771711edc5feaf00c10b18461e0f5f5949231"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73038d66614d2e5cde30435b5afdced2b473b4c77d4ca3a8624dd3e41a9c19be"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6afd5c867a74c4d314c557b5ea9520183fadfbd1df4c2d6e09fd0d990ce412cd"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd7df92f28d351bb9f12470f4c533cf03d1b52ec5a6e5c58c65b183055a60106"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:80aea0ffeb1049336043d07799eace1c9602519fb3192916ff525b0287b2b1e4"}, + {file = "pydantic_core-2.18.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:aaee40f25bba38132e655ffa3d1998a6d576ba7cf81deff8bfa189fb43fd2bbe"}, + {file = "pydantic_core-2.18.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9128089da8f4fe73f7a91973895ebf2502539d627891a14034e45fb9e707e26d"}, + {file = "pydantic_core-2.18.3-cp311-none-win32.whl", hash = "sha256:fec02527e1e03257aa25b1a4dcbe697b40a22f1229f5d026503e8b7ff6d2eda7"}, + {file = "pydantic_core-2.18.3-cp311-none-win_amd64.whl", hash = "sha256:58ff8631dbab6c7c982e6425da8347108449321f61fe427c52ddfadd66642af7"}, + {file = "pydantic_core-2.18.3-cp311-none-win_arm64.whl", hash = "sha256:3fc1c7f67f34c6c2ef9c213e0f2a351797cda98249d9ca56a70ce4ebcaba45f4"}, + {file = "pydantic_core-2.18.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f0928cde2ae416a2d1ebe6dee324709c6f73e93494d8c7aea92df99aab1fc40f"}, + {file = "pydantic_core-2.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bee9bb305a562f8b9271855afb6ce00223f545de3d68560b3c1649c7c5295e9"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e862823be114387257dacbfa7d78547165a85d7add33b446ca4f4fae92c7ff5c"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a36f78674cbddc165abab0df961b5f96b14461d05feec5e1f78da58808b97e7"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba905d184f62e7ddbb7a5a751d8a5c805463511c7b08d1aca4a3e8c11f2e5048"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fdd362f6a586e681ff86550b2379e532fee63c52def1c666887956748eaa326"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24b214b7ee3bd3b865e963dbed0f8bc5375f49449d70e8d407b567af3222aae4"}, + {file = 
"pydantic_core-2.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:691018785779766127f531674fa82bb368df5b36b461622b12e176c18e119022"}, + {file = "pydantic_core-2.18.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:60e4c625e6f7155d7d0dcac151edf5858102bc61bf959d04469ca6ee4e8381bd"}, + {file = "pydantic_core-2.18.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4e651e47d981c1b701dcc74ab8fec5a60a5b004650416b4abbef13db23bc7be"}, + {file = "pydantic_core-2.18.3-cp312-none-win32.whl", hash = "sha256:ffecbb5edb7f5ffae13599aec33b735e9e4c7676ca1633c60f2c606beb17efc5"}, + {file = "pydantic_core-2.18.3-cp312-none-win_amd64.whl", hash = "sha256:2c8333f6e934733483c7eddffdb094c143b9463d2af7e6bd85ebcb2d4a1b82c6"}, + {file = "pydantic_core-2.18.3-cp312-none-win_arm64.whl", hash = "sha256:7a20dded653e516a4655f4c98e97ccafb13753987434fe7cf044aa25f5b7d417"}, + {file = "pydantic_core-2.18.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:eecf63195be644b0396f972c82598cd15693550f0ff236dcf7ab92e2eb6d3522"}, + {file = "pydantic_core-2.18.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c44efdd3b6125419c28821590d7ec891c9cb0dff33a7a78d9d5c8b6f66b9702"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e59fca51ffbdd1638b3856779342ed69bcecb8484c1d4b8bdb237d0eb5a45e2"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70cf099197d6b98953468461d753563b28e73cf1eade2ffe069675d2657ed1d5"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63081a49dddc6124754b32a3774331467bfc3d2bd5ff8f10df36a95602560361"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:370059b7883485c9edb9655355ff46d912f4b03b009d929220d9294c7fd9fd60"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a64faeedfd8254f05f5cf6fc755023a7e1606af3959cfc1a9285744cc711044"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19d2e725de0f90d8671f89e420d36c3dd97639b98145e42fcc0e1f6d492a46dc"}, + {file = "pydantic_core-2.18.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:67bc078025d70ec5aefe6200ef094576c9d86bd36982df1301c758a9fff7d7f4"}, + {file = "pydantic_core-2.18.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:adf952c3f4100e203cbaf8e0c907c835d3e28f9041474e52b651761dc248a3c0"}, + {file = "pydantic_core-2.18.3-cp38-none-win32.whl", hash = "sha256:9a46795b1f3beb167eaee91736d5d17ac3a994bf2215a996aed825a45f897558"}, + {file = "pydantic_core-2.18.3-cp38-none-win_amd64.whl", hash = "sha256:200ad4e3133cb99ed82342a101a5abf3d924722e71cd581cc113fe828f727fbc"}, + {file = "pydantic_core-2.18.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:304378b7bf92206036c8ddd83a2ba7b7d1a5b425acafff637172a3aa72ad7083"}, + {file = "pydantic_core-2.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c826870b277143e701c9ccf34ebc33ddb4d072612683a044e7cce2d52f6c3fef"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e201935d282707394f3668380e41ccf25b5794d1b131cdd96b07f615a33ca4b1"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5560dda746c44b48bf82b3d191d74fe8efc5686a9ef18e69bdabccbbb9ad9442"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:6b32c2a1f8032570842257e4c19288eba9a2bba4712af542327de9a1204faff8"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:929c24e9dea3990bc8bcd27c5f2d3916c0c86f5511d2caa69e0d5290115344a9"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a8376fef60790152564b0eab376b3e23dd6e54f29d84aad46f7b264ecca943"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dccf3ef1400390ddd1fb55bf0632209d39140552d068ee5ac45553b556780e06"}, + {file = "pydantic_core-2.18.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:41dbdcb0c7252b58fa931fec47937edb422c9cb22528f41cb8963665c372caf6"}, + {file = "pydantic_core-2.18.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:666e45cf071669fde468886654742fa10b0e74cd0fa0430a46ba6056b24fb0af"}, + {file = "pydantic_core-2.18.3-cp39-none-win32.whl", hash = "sha256:f9c08cabff68704a1b4667d33f534d544b8a07b8e5d039c37067fceb18789e78"}, + {file = "pydantic_core-2.18.3-cp39-none-win_amd64.whl", hash = "sha256:4afa5f5973e8572b5c0dcb4e2d4fda7890e7cd63329bd5cc3263a25c92ef0026"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:77319771a026f7c7d29c6ebc623de889e9563b7087911b46fd06c044a12aa5e9"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:df11fa992e9f576473038510d66dd305bcd51d7dd508c163a8c8fe148454e059"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d531076bdfb65af593326ffd567e6ab3da145020dafb9187a1d131064a55f97c"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d33ce258e4e6e6038f2b9e8b8a631d17d017567db43483314993b3ca345dcbbb"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1f9cd7f5635b719939019be9bda47ecb56e165e51dd26c9a217a433e3d0d59a9"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cd4a032bb65cc132cae1fe3e52877daecc2097965cd3914e44fbd12b00dae7c5"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f2718430098bcdf60402136c845e4126a189959d103900ebabb6774a5d9fdb"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c0037a92cf0c580ed14e10953cdd26528e8796307bb8bb312dc65f71547df04d"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b95a0972fac2b1ff3c94629fc9081b16371dad870959f1408cc33b2f78ad347a"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a62e437d687cc148381bdd5f51e3e81f5b20a735c55f690c5be94e05da2b0d5c"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b367a73a414bbb08507da102dc2cde0fa7afe57d09b3240ce82a16d608a7679c"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ecce4b2360aa3f008da3327d652e74a0e743908eac306198b47e1c58b03dd2b"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd4435b8d83f0c9561a2a9585b1de78f1abb17cb0cef5f39bf6a4b47d19bafe3"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:616221a6d473c5b9aa83fa8982745441f6a4a62a66436be9445c65f241b86c94"}, + {file = 
"pydantic_core-2.18.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7e6382ce89a92bc1d0c0c5edd51e931432202b9080dc921d8d003e616402efd1"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff58f379345603d940e461eae474b6bbb6dab66ed9a851ecd3cb3709bf4dcf6a"}, + {file = "pydantic_core-2.18.3.tar.gz", hash = "sha256:432e999088d85c8f36b9a3f769a8e2b57aabd817bbb729a90d1fe7f18f6f1f39"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydevd" @@ -1926,33 +2046,32 @@ files = [ [[package]] name = "pydid" -version = "0.4.3" +version = "0.5.0" description = "Python library for validating, constructing, and representing DIDs and DID Documents" optional = false -python-versions = ">=3.8.0,<4.0.0" +python-versions = "<4.0.0,>=3.9.0" files = [ - {file = "pydid-0.4.3-py3-none-any.whl", hash = "sha256:39a586b4f26c41277b93db2aaf0a2db298f48ccc413bdfc71b7dd010045f31f4"}, - {file = "pydid-0.4.3.tar.gz", hash = "sha256:1a48a6940bae8279083ebb7c5ab5fe0249e9ba3ea638de9cf8c127487b96b2ef"}, + {file = "pydid-0.5.0-py3-none-any.whl", hash = "sha256:2562852d2af98ce1a404d64b0826344d811ad78142927da3a84116f1103eac43"}, + {file = "pydid-0.5.0.tar.gz", hash = "sha256:c97c543e019c469fae0939bab454bedf8e010668e746935e3094e13bdfad28d0"}, ] [package.dependencies] inflection = ">=0.5.1,<0.6.0" -pydantic = ">=1.10.0,<2.0.0" -typing-extensions = ">=4.5.0,<5.0.0" +pydantic = ">=2.7.0,<3.0.0" +typing-extensions = ">=4.7.0,<5.0.0" [[package]] name = "pygments" -version = "2.17.2" +version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] [package.extras] -plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] [[package]] @@ -2022,13 +2141,13 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pytest" -version = "8.0.2" +version = "8.2.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.0.2-py3-none-any.whl", hash = "sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096"}, - {file = "pytest-8.0.2.tar.gz", hash = "sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd"}, + {file = "pytest-8.2.1-py3-none-any.whl", hash = "sha256:faccc5d332b8c3719f40283d0d44aa5cf101cec36f88cde9ed8f2bc0538612b1"}, + {file = "pytest-8.2.1.tar.gz", hash = "sha256:5046e5b46d8e4cac199c373041f26be56fdb81eb4e67dc11d4e10811fc3408fd"}, ] [package.dependencies] @@ -2036,21 +2155,21 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.3.0,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.5,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", 
"hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" -version = "0.23.5" +version = "0.23.7" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-asyncio-0.23.5.tar.gz", hash = "sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675"}, - {file = "pytest_asyncio-0.23.5-py3-none-any.whl", hash = "sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac"}, + {file = "pytest_asyncio-0.23.7-py3-none-any.whl", hash = "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b"}, + {file = "pytest_asyncio-0.23.7.tar.gz", hash = "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268"}, ] [package.dependencies] @@ -2062,13 +2181,13 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" -version = "4.1.0" +version = "5.0.0" description = "Pytest plugin for measuring coverage." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, ] [package.dependencies] @@ -2076,20 +2195,21 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-ruff" -version = "0.1.1" +version = "0.3.2" description = "pytest plugin to check ruff requirements." optional = false -python-versions = ">=3.7,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "pytest_ruff-0.1.1-py3-none-any.whl", hash = "sha256:db33c8d32d730d61d372c1ac4615b1036c47a14c781cbc0ae71811c4cadadc47"}, - {file = "pytest_ruff-0.1.1.tar.gz", hash = "sha256:f599768ff3834d6b1d6d26b25a030a5b1dcc9cf187239bd9621a7f25f7d8fe46"}, + {file = "pytest_ruff-0.3.2-py3-none-any.whl", hash = "sha256:5096578df2240b2a99f7376747bc433ce25e590c7d570d5c2b47f725497f2c10"}, + {file = "pytest_ruff-0.3.2.tar.gz", hash = "sha256:8d82882969e52b664a7cef4465cba63e45173f38d907dffeca41d9672f59b6c6"}, ] [package.dependencies] +pytest = ">=5" ruff = ">=0.0.242" [[package]] @@ -2117,22 +2237,6 @@ files = [ {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, ] -[[package]] -name = "python3-indy" -version = "1.16.0.post286" -description = "This is the official SDK for Hyperledger Indy (https://www.hyperledger.org/projects), which provides a distributed-ledger-based foundation for self-sovereign identity (https://sovrin.org). The major artifact of the SDK is a c-callable library." 
-optional = true -python-versions = "*" -files = [ - {file = "python3-indy-1.16.0-post-286.tar.gz", hash = "sha256:80e6a4241134ea3ef8b2554cffb11e504978f87edb004a1c965ec6eb063449a4"}, -] - -[package.dependencies] -base58 = "*" - -[package.extras] -test = ["base58", "pytest (<3.7)", "pytest-asyncio (==0.10.0)"] - [[package]] name = "pytz" version = "2021.1" @@ -2192,7 +2296,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -2272,48 +2375,48 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rlp" -version = "4.0.0" +version = "4.0.1" description = "rlp: A package for Recursive Length Prefix encoding and decoding" optional = false -python-versions = ">=3.8, <4" +python-versions = "<4,>=3.8" files = [ - {file = "rlp-4.0.0-py3-none-any.whl", hash = "sha256:1747fd933e054e6d25abfe591be92e19a4193a56c93981c05bd0f84dfe279f14"}, - {file = "rlp-4.0.0.tar.gz", hash = "sha256:61a5541f86e4684ab145cb849a5929d2ced8222930a570b3941cf4af16b72a78"}, + {file = "rlp-4.0.1-py3-none-any.whl", hash = "sha256:ff6846c3c27b97ee0492373aa074a7c3046aadd973320f4fffa7ac45564b0258"}, + {file = "rlp-4.0.1.tar.gz", hash = "sha256:bcefb11013dfadf8902642337923bd0c786dc8a27cb4c21da6e154e52869ecb1"}, ] [package.dependencies] eth-utils = ">=2" [package.extras] -dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "hypothesis (==5.19.0)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] -docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] -rust-backend = ["rusty-rlp (>=0.2.1,<0.3)"] +dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "hypothesis (==5.19.0)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] +rust-backend = ["rusty-rlp (>=0.2.1)"] test = ["hypothesis (==5.19.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "ruff" -version = "0.1.2" -description = "An extremely fast Python linter, written in Rust." +version = "0.4.4" +description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.2-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:0d3ee66b825b713611f89aa35d16de984f76f26c50982a25d52cd0910dff3923"}, - {file = "ruff-0.1.2-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:f85f850a320ff532b8f93e8d1da6a36ef03698c446357c8c43b46ef90bb321eb"}, - {file = "ruff-0.1.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:809c6d4e45683696d19ca79e4c6bd3b2e9204fe9546923f2eb3b126ec314b0dc"}, - {file = "ruff-0.1.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46005e4abb268e93cad065244e17e2ea16b6fcb55a5c473f34fbc1fd01ae34cb"}, - {file = "ruff-0.1.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10cdb302f519664d5e2cf954562ac86c9d20ca05855e5b5c2f9d542228f45da4"}, - {file = "ruff-0.1.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f89ebcbe57a1eab7d7b4ceb57ddf0af9ed13eae24e443a7c1dc078000bd8cc6b"}, - {file = "ruff-0.1.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7344eaca057d4c32373c9c3a7afb7274f56040c225b6193dd495fcf69453b436"}, - {file = "ruff-0.1.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dffa25f6e03c4950b6ac6f216bc0f98a4be9719cb0c5260c8e88d1bac36f1683"}, - {file = "ruff-0.1.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42ddaea52cb7ba7c785e8593a7532866c193bc774fe570f0e4b1ccedd95b83c5"}, - {file = "ruff-0.1.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8533efda625bbec0bf27da2886bd641dae0c209104f6c39abc4be5b7b22de2a"}, - {file = "ruff-0.1.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b0b1b82221ba7c50e03b7a86b983157b5d3f4d8d4f16728132bdf02c6d651f77"}, - {file = "ruff-0.1.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c1362eb9288f8cc95535294cb03bd4665c8cef86ec32745476a4e5c6817034c"}, - {file = "ruff-0.1.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ffa7ef5ded0563329a35bd5a1cfdae40f05a75c0cc2dd30f00b1320b1fb461fc"}, - {file = "ruff-0.1.2-py3-none-win32.whl", hash = "sha256:6e8073f85e47072256e2e1909f1ae515cf61ff5a4d24730a63b8b4ac24b6704a"}, - {file = "ruff-0.1.2-py3-none-win_amd64.whl", hash = "sha256:b836ddff662a45385948ee0878b0a04c3a260949905ad861a37b931d6ee1c210"}, - {file = "ruff-0.1.2-py3-none-win_arm64.whl", hash = "sha256:b0c42d00db5639dbd5f7f9923c63648682dd197bf5de1151b595160c96172691"}, - {file = "ruff-0.1.2.tar.gz", hash = "sha256:afd4785ae060ce6edcd52436d0c197628a918d6d09e3107a892a1bad6a4c6608"}, + {file = "ruff-0.4.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:29d44ef5bb6a08e235c8249294fa8d431adc1426bfda99ed493119e6f9ea1bf6"}, + {file = "ruff-0.4.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c4efe62b5bbb24178c950732ddd40712b878a9b96b1d02b0ff0b08a090cbd891"}, + {file = "ruff-0.4.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c8e2f1e8fc12d07ab521a9005d68a969e167b589cbcaee354cb61e9d9de9c15"}, + {file = "ruff-0.4.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60ed88b636a463214905c002fa3eaab19795679ed55529f91e488db3fe8976ab"}, + {file = "ruff-0.4.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b90fc5e170fc71c712cc4d9ab0e24ea505c6a9e4ebf346787a67e691dfb72e85"}, + {file = "ruff-0.4.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8e7e6ebc10ef16dcdc77fd5557ee60647512b400e4a60bdc4849468f076f6eef"}, + {file = 
"ruff-0.4.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9ddb2c494fb79fc208cd15ffe08f32b7682519e067413dbaf5f4b01a6087bcd"}, + {file = "ruff-0.4.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c51c928a14f9f0a871082603e25a1588059b7e08a920f2f9fa7157b5bf08cfe9"}, + {file = "ruff-0.4.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5eb0a4bfd6400b7d07c09a7725e1a98c3b838be557fee229ac0f84d9aa49c36"}, + {file = "ruff-0.4.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b1867ee9bf3acc21778dcb293db504692eda5f7a11a6e6cc40890182a9f9e595"}, + {file = "ruff-0.4.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1aecced1269481ef2894cc495647392a34b0bf3e28ff53ed95a385b13aa45768"}, + {file = "ruff-0.4.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9da73eb616b3241a307b837f32756dc20a0b07e2bcb694fec73699c93d04a69e"}, + {file = "ruff-0.4.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:958b4ea5589706a81065e2a776237de2ecc3e763342e5cc8e02a4a4d8a5e6f95"}, + {file = "ruff-0.4.4-py3-none-win32.whl", hash = "sha256:cb53473849f011bca6e754f2cdf47cafc9c4f4ff4570003a0dad0b9b6890e876"}, + {file = "ruff-0.4.4-py3-none-win_amd64.whl", hash = "sha256:424e5b72597482543b684c11def82669cc6b395aa8cc69acc1858b5ef3e5daae"}, + {file = "ruff-0.4.4-py3-none-win_arm64.whl", hash = "sha256:39df0537b47d3b597293edbb95baf54ff5b49589eb7ff41926d8243caa995ea6"}, + {file = "ruff-0.4.4.tar.gz", hash = "sha256:f87ea42d5cdebdc6a69761a9d0bc83ae9b3b30d0ad78952005ba6568d6c022af"}, ] [[package]] @@ -2333,126 +2436,18 @@ pyyaml = ">=5.4" [[package]] name = "setuptools" -version = "69.1.1" +version = "70.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, - {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, + {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, + {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "simplejson" -version = "3.19.2" -description = "Simple, fast, extensible JSON encoder/decoder for Python" -optional = false -python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ 
- {file = "simplejson-3.19.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3471e95110dcaf901db16063b2e40fb394f8a9e99b3fe9ee3acc6f6ef72183a2"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3194cd0d2c959062b94094c0a9f8780ffd38417a5322450a0db0ca1a23e7fbd2"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:8a390e56a7963e3946ff2049ee1eb218380e87c8a0e7608f7f8790ba19390867"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1537b3dd62d8aae644f3518c407aa8469e3fd0f179cdf86c5992792713ed717a"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a8617625369d2d03766413bff9e64310feafc9fc4f0ad2b902136f1a5cd8c6b0"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:2c433a412e96afb9a3ce36fa96c8e61a757af53e9c9192c97392f72871e18e69"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f1c70249b15e4ce1a7d5340c97670a95f305ca79f376887759b43bb33288c973"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:287e39ba24e141b046812c880f4619d0ca9e617235d74abc27267194fc0c7835"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6f0a0b41dd05eefab547576bed0cf066595f3b20b083956b1405a6f17d1be6ad"}, - {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f98d918f7f3aaf4b91f2b08c0c92b1774aea113334f7cde4fe40e777114dbe6"}, - {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d74beca677623481810c7052926365d5f07393c72cbf62d6cce29991b676402"}, - {file = "simplejson-3.19.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7f2398361508c560d0bf1773af19e9fe644e218f2a814a02210ac2c97ad70db0"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ad331349b0b9ca6da86064a3599c425c7a21cd41616e175ddba0866da32df48"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:332c848f02d71a649272b3f1feccacb7e4f7e6de4a2e6dc70a32645326f3d428"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25785d038281cd106c0d91a68b9930049b6464288cea59ba95b35ee37c2d23a5"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18955c1da6fc39d957adfa346f75226246b6569e096ac9e40f67d102278c3bcb"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:11cc3afd8160d44582543838b7e4f9aa5e97865322844b75d51bf4e0e413bb3e"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b01fda3e95d07a6148702a641e5e293b6da7863f8bc9b967f62db9461330562c"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:778331444917108fa8441f59af45886270d33ce8a23bfc4f9b192c0b2ecef1b3"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9eb117db8d7ed733a7317c4215c35993b815bf6aeab67523f1f11e108c040672"}, - {file = "simplejson-3.19.2-cp310-cp310-win32.whl", hash = "sha256:39b6d79f5cbfa3eb63a869639cfacf7c41d753c64f7801efc72692c1b2637ac7"}, - {file = "simplejson-3.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:5675e9d8eeef0aa06093c1ff898413ade042d73dc920a03e8cea2fb68f62445a"}, - {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:ed628c1431100b0b65387419551e822987396bee3c088a15d68446d92f554e0c"}, - {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:adcb3332979cbc941b8fff07181f06d2b608625edc0a4d8bc3ffc0be414ad0c4"}, - {file = "simplejson-3.19.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08889f2f597ae965284d7b52a5c3928653a9406d88c93e3161180f0abc2433ba"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7938a78447174e2616be223f496ddccdbf7854f7bf2ce716dbccd958cc7d13"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a970a2e6d5281d56cacf3dc82081c95c1f4da5a559e52469287457811db6a79b"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554313db34d63eac3b3f42986aa9efddd1a481169c12b7be1e7512edebff8eaf"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d36081c0b1c12ea0ed62c202046dca11438bee48dd5240b7c8de8da62c620e9"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a3cd18e03b0ee54ea4319cdcce48357719ea487b53f92a469ba8ca8e39df285e"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66e5dc13bfb17cd6ee764fc96ccafd6e405daa846a42baab81f4c60e15650414"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:972a7833d4a1fcf7a711c939e315721a88b988553fc770a5b6a5a64bd6ebeba3"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3e74355cb47e0cd399ead3477e29e2f50e1540952c22fb3504dda0184fc9819f"}, - {file = "simplejson-3.19.2-cp311-cp311-win32.whl", hash = "sha256:1dd4f692304854352c3e396e9b5f0a9c9e666868dd0bdc784e2ac4c93092d87b"}, - {file = "simplejson-3.19.2-cp311-cp311-win_amd64.whl", hash = "sha256:9300aee2a8b5992d0f4293d88deb59c218989833e3396c824b69ba330d04a589"}, - {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b8d940fd28eb34a7084877747a60873956893e377f15a32ad445fe66c972c3b8"}, - {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4969d974d9db826a2c07671273e6b27bc48e940738d768fa8f33b577f0978378"}, - {file = "simplejson-3.19.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c594642d6b13d225e10df5c16ee15b3398e21a35ecd6aee824f107a625690374"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2f5a398b5e77bb01b23d92872255e1bcb3c0c719a3be40b8df146570fe7781a"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176a1b524a3bd3314ed47029a86d02d5a95cc0bee15bd3063a1e1ec62b947de6"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3c7363a8cb8c5238878ec96c5eb0fc5ca2cb11fc0c7d2379863d342c6ee367a"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:346820ae96aa90c7d52653539a57766f10f33dd4be609206c001432b59ddf89f"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de9a2792612ec6def556d1dc621fd6b2073aff015d64fba9f3e53349ad292734"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1c768e7584c45094dca4b334af361e43b0aaa4844c04945ac7d43379eeda9bc2"}, - {file = 
"simplejson-3.19.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:9652e59c022e62a5b58a6f9948b104e5bb96d3b06940c6482588176f40f4914b"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9c1a4393242e321e344213a90a1e3bf35d2f624aa8b8f6174d43e3c6b0e8f6eb"}, - {file = "simplejson-3.19.2-cp312-cp312-win32.whl", hash = "sha256:7cb98be113911cb0ad09e5523d0e2a926c09a465c9abb0784c9269efe4f95917"}, - {file = "simplejson-3.19.2-cp312-cp312-win_amd64.whl", hash = "sha256:6779105d2fcb7fcf794a6a2a233787f6bbd4731227333a072d8513b252ed374f"}, - {file = "simplejson-3.19.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:061e81ea2d62671fa9dea2c2bfbc1eec2617ae7651e366c7b4a2baf0a8c72cae"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4280e460e51f86ad76dc456acdbfa9513bdf329556ffc8c49e0200878ca57816"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11c39fbc4280d7420684494373b7c5904fa72a2b48ef543a56c2d412999c9e5d"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bccb3e88ec26ffa90f72229f983d3a5d1155e41a1171190fa723d4135523585b"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb5b50dc6dd671eb46a605a3e2eb98deb4a9af787a08fcdddabe5d824bb9664"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d94245caa3c61f760c4ce4953cfa76e7739b6f2cbfc94cc46fff6c050c2390c5"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d0e5ffc763678d48ecc8da836f2ae2dd1b6eb2d27a48671066f91694e575173c"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d222a9ed082cd9f38b58923775152003765016342a12f08f8c123bf893461f28"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8434dcdd347459f9fd9c526117c01fe7ca7b016b6008dddc3c13471098f4f0dc"}, - {file = "simplejson-3.19.2-cp36-cp36m-win32.whl", hash = "sha256:c9ac1c2678abf9270e7228133e5b77c6c3c930ad33a3c1dfbdd76ff2c33b7b50"}, - {file = "simplejson-3.19.2-cp36-cp36m-win_amd64.whl", hash = "sha256:92c4a4a2b1f4846cd4364855cbac83efc48ff5a7d7c06ba014c792dd96483f6f"}, - {file = "simplejson-3.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0d551dc931638e2102b8549836a1632e6e7cf620af3d093a7456aa642bff601d"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73a8a4653f2e809049999d63530180d7b5a344b23a793502413ad1ecea9a0290"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40847f617287a38623507d08cbcb75d51cf9d4f9551dd6321df40215128325a3"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be893258d5b68dd3a8cba8deb35dc6411db844a9d35268a8d3793b9d9a256f80"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9eb3cff1b7d71aa50c89a0536f469cb8d6dcdd585d8f14fb8500d822f3bdee4"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d0f402e787e6e7ee7876c8b05e2fe6464820d9f35ba3f172e95b5f8b699f6c7f"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:fbbcc6b0639aa09b9649f36f1bcb347b19403fe44109948392fbb5ea69e48c3e"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:2fc697be37585eded0c8581c4788fcfac0e3f84ca635b73a5bf360e28c8ea1a2"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b0a3eb6dd39cce23801a50c01a0976971498da49bc8a0590ce311492b82c44b"}, - {file = "simplejson-3.19.2-cp37-cp37m-win32.whl", hash = "sha256:49f9da0d6cd17b600a178439d7d2d57c5ef01f816b1e0e875e8e8b3b42db2693"}, - {file = "simplejson-3.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c87c22bd6a987aca976e3d3e23806d17f65426191db36d40da4ae16a6a494cbc"}, - {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e4c166f743bb42c5fcc60760fb1c3623e8fda94f6619534217b083e08644b46"}, - {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0a48679310e1dd5c9f03481799311a65d343748fe86850b7fb41df4e2c00c087"}, - {file = "simplejson-3.19.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0521e0f07cb56415fdb3aae0bbd8701eb31a9dfef47bb57206075a0584ab2a2"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d2d5119b1d7a1ed286b8af37357116072fc96700bce3bec5bb81b2e7057ab41"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c1467d939932901a97ba4f979e8f2642415fcf02ea12f53a4e3206c9c03bc17"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49aaf4546f6023c44d7e7136be84a03a4237f0b2b5fb2b17c3e3770a758fc1a0"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60848ab779195b72382841fc3fa4f71698a98d9589b0a081a9399904487b5832"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0436a70d8eb42bea4fe1a1c32d371d9bb3b62c637969cb33970ad624d5a3336a"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:49e0e3faf3070abdf71a5c80a97c1afc059b4f45a5aa62de0c2ca0444b51669b"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ff836cd4041e16003549449cc0a5e372f6b6f871eb89007ab0ee18fb2800fded"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3848427b65e31bea2c11f521b6fc7a3145d6e501a1038529da2391aff5970f2f"}, - {file = "simplejson-3.19.2-cp38-cp38-win32.whl", hash = "sha256:3f39bb1f6e620f3e158c8b2eaf1b3e3e54408baca96a02fe891794705e788637"}, - {file = "simplejson-3.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:0405984f3ec1d3f8777c4adc33eac7ab7a3e629f3b1c05fdded63acc7cf01137"}, - {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:445a96543948c011a3a47c8e0f9d61e9785df2544ea5be5ab3bc2be4bd8a2565"}, - {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a8c3cc4f9dfc33220246760358c8265dad6e1104f25f0077bbca692d616d358"}, - {file = "simplejson-3.19.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af9c7e6669c4d0ad7362f79cb2ab6784d71147503e62b57e3d95c4a0f222c01c"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:064300a4ea17d1cd9ea1706aa0590dcb3be81112aac30233823ee494f02cb78a"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9453419ea2ab9b21d925d0fd7e3a132a178a191881fab4169b6f96e118cc25bb"}, - {file = 
"simplejson-3.19.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e038c615b3906df4c3be8db16b3e24821d26c55177638ea47b3f8f73615111c"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16ca9c90da4b1f50f089e14485db8c20cbfff2d55424062791a7392b5a9b3ff9"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1018bd0d70ce85f165185d2227c71e3b1e446186f9fa9f971b69eee223e1e3cd"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e8dd53a8706b15bc0e34f00e6150fbefb35d2fd9235d095b4f83b3c5ed4fa11d"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2d022b14d7758bfb98405672953fe5c202ea8a9ccf9f6713c5bd0718eba286fd"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:febffa5b1eda6622d44b245b0685aff6fb555ce0ed734e2d7b1c3acd018a2cff"}, - {file = "simplejson-3.19.2-cp39-cp39-win32.whl", hash = "sha256:4edcd0bf70087b244ba77038db23cd98a1ace2f91b4a3ecef22036314d77ac23"}, - {file = "simplejson-3.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:aad7405c033d32c751d98d3a65801e2797ae77fac284a539f6c3a3e13005edc4"}, - {file = "simplejson-3.19.2-py3-none-any.whl", hash = "sha256:bcedf4cae0d47839fee7de344f96b5694ca53c786f28b5f773d4f0b265a159eb"}, - {file = "simplejson-3.19.2.tar.gz", hash = "sha256:9eb442a2442ce417801c912df68e1f6ccfcd41577ae7274953ab3ad24ef7d82c"}, -] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -2581,13 +2576,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.12.1" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.12.1-py3-none-any.whl", hash = "sha256:6024b58b69089e5a89c347397254e35f1bf02a907728ec7fee9bf0fe837d203a"}, + {file = "typing_extensions-4.12.1.tar.gz", hash = "sha256:915f5e35ff76f56588223f15fdd5938f9a1cf9195c0de25130c627e4d597f6d1"}, ] [[package]] @@ -2630,15 +2625,107 @@ files = [ {file = "ursa_bbs_signatures-1.0.1-py3-none-win_amd64.whl", hash = "sha256:ffd5f8cf1518c706b372feccac5d727a9d6c64a68f54f4d109133c4101108368"}, ] +[[package]] +name = "uuid-utils" +version = "0.7.0" +description = "Drop-in replacement for Python UUID in Rust" +optional = false +python-versions = ">=3.8" 
+files = [ + {file = "uuid_utils-0.7.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:1813869ffbf82ebe5fbe749cf0d5e580c605b0fd65d5e738e44439578280f993"}, + {file = "uuid_utils-0.7.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:afb6d3cea6f8b1d9692a1c5d7a93aa6189f973509ea272f4c070399e88cea36b"}, + {file = "uuid_utils-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38af087e1804774f563ff5f9f043022274dfce110b721ca272f89c0de4ee44e1"}, + {file = "uuid_utils-0.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:183603176b65401492db51a16526360997c91e32bc1ffe20ee527337fc57f634"}, + {file = "uuid_utils-0.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cebc0e99853c6c12f42e509c27af6131ef36b29e6f381d53c6d81eb1bd21a5f4"}, + {file = "uuid_utils-0.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49e0a42bd9c3825f10d38dcc49bafe5b6543b6c107e4b614e96abf8a7cd58a6f"}, + {file = "uuid_utils-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a0f978aa8a51ca05142e4e81767d67de08b35ce7db28bc2e600d0c317472013"}, + {file = "uuid_utils-0.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3d2d02868c73334e84d80a7ad60e6c7506c72c059508e9a38db453e4110a652"}, + {file = "uuid_utils-0.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:03f710c032d903f273c720dfc080b68fead1ed543de8ad53c4c8dde64c6edd56"}, + {file = "uuid_utils-0.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b60c49becd9ff3844fe6e0e87319df9c84dd65bb86c36ad3514981f64e7a737a"}, + {file = "uuid_utils-0.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c7ae618dbe27eb5c681a09bec4554d8da8264130a083657fcb80033bbf1c6114"}, + {file = "uuid_utils-0.7.0-cp310-none-win32.whl", hash = "sha256:fb73e36a209c2b585e878748615c0410d2422908ad86fc12b5ae66fedd7e326d"}, + {file = "uuid_utils-0.7.0-cp310-none-win_amd64.whl", hash = "sha256:8e30075e257184328356436a8a6b0e5a0c2b097c224a1e7f9d98a4c350ae5f21"}, + {file = "uuid_utils-0.7.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:ca41e673b807405c0c5aa97ff8959b80884734b1eb55428c7285de245aa3e101"}, + {file = "uuid_utils-0.7.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cac7e2cf5b40ef297a998fc3ede146f171f99b18210e1237f01002c7e3fa6b0b"}, + {file = "uuid_utils-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bad486bcb3b1bd1f6a6e02d9627c51b993305bd2efd3eb4acd0aff529cd7d43"}, + {file = "uuid_utils-0.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd9d769f85bd24a558e8d1aee93400811e3f734199acc5410617f67b1041e0f4"}, + {file = "uuid_utils-0.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c99930f6d51efd15b6c2feb73b386bffccfc82c535eb7d8229e4fb6467f5c6c"}, + {file = "uuid_utils-0.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c68ba81b63e23032beda93eeab084f76f141017a26cb895c65777cf3c6c3474"}, + {file = "uuid_utils-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdaa67667584aba2096292607e2f2e4485df1d1fb2594b2390227cf18df057f0"}, + {file = "uuid_utils-0.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6506fedaacd814b50cb62745b058796612c0ddd818a35a70082ea76f8b484931"}, + {file = "uuid_utils-0.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:eaa55deae8fd4e7ff30a31f1661e953d70705efa3b09d0fc33576a8eaa589910"}, + {file = "uuid_utils-0.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0d4e7cd2f45e9a3dd371abb8532c6fcbb9befa1551522336095b02369e9144a9"}, + {file = "uuid_utils-0.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d6a35a2205318cff201e76cbc6ad428c58e4d9d9ce9c83fd600c5295538be60e"}, + {file = "uuid_utils-0.7.0-cp311-none-win32.whl", hash = "sha256:a7c82f88158f0693cfbc769536d7c09a7cd3c58b22a1b2a041374db1ba03e2d3"}, + {file = "uuid_utils-0.7.0-cp311-none-win_amd64.whl", hash = "sha256:df8f82270295726d1f7d1e26026c29d33a2b40e6dcf8723cf7f5809909eaf6d6"}, + {file = "uuid_utils-0.7.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:53e5d6703f6a38aa1ba59cf8ac0486ac9a847e816e638cf9d6a2a4da4e9f6247"}, + {file = "uuid_utils-0.7.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c29183a8baedb39fc89e3d98ed2427d49e97ff3680f6832bffe73568d594970d"}, + {file = "uuid_utils-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:253fd6e8962008484e02fd4ff4a77ffbddd3867c0c3c24a6919eb4fefc3a2297"}, + {file = "uuid_utils-0.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de53537159212608eb15d4948d0e0098d2fa2b30d453f93d83fe737f0fd7188b"}, + {file = "uuid_utils-0.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:116c4b2ff774ce552324b196a3222302a2e78479a301fdb11c2aa1d294ab0f4d"}, + {file = "uuid_utils-0.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2eafb4fe02270e22a3bdb03c2107604cf68589a965667cabb71789beed318497"}, + {file = "uuid_utils-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ab7a012a1514e498f3f537852257ad2ec9402d1cc165865108dc6d9496bbd4"}, + {file = "uuid_utils-0.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:08d58f7de04f3c43a4da05eece58002f4028a7275775ad5013e010abd51d7238"}, + {file = "uuid_utils-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7e349d43a969f696dbc7acd002b64952b71674eaf948043a4c6dd1ab65d7c462"}, + {file = "uuid_utils-0.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:53f4c96e7fd1dab33dd56a885d9cffb5aaf21a9064115743e2cee1ff03cb359b"}, + {file = "uuid_utils-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c145629c4e48cda275310955632a8231c031f5e9b2eb93b9ab8a081dc6ab6681"}, + {file = "uuid_utils-0.7.0-cp312-none-win_amd64.whl", hash = "sha256:2ca368440148049475ff94f62d5011c34cd7954fe36247698fc05658d04ad9a1"}, + {file = "uuid_utils-0.7.0-cp38-cp38-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:48ed8e59c6fdcc8f825e9fa58afc7f98ba37f744a401ff28a47e7042a761b373"}, + {file = "uuid_utils-0.7.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:bb2777eb2837fc88aceb09addb45bfc7bc8dd0058d19627867b459dac3101a4b"}, + {file = "uuid_utils-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:070254d2435e9f187e0e8c0626fc6ed108d308cdec669c6d1493dd117bfbedd1"}, + {file = "uuid_utils-0.7.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:424abbbf7e8bdfe78ab552d838efeb9fd033cfe2208f00aadee2704169a1ebad"}, + {file = "uuid_utils-0.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:884a72b5f87f7534b685382221d872058bb743294cdb0f2215056b6cc85350fb"}, + {file = "uuid_utils-0.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:ab1509e21c74feb68b4a3e309bde8c64a8fce2e4552b79cb14058d6bc17a6129"}, + {file = "uuid_utils-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e6d70efc5e3449f0be3184a6925d0feb29fe40bdcd24ee2611a9021ee9b2580"}, + {file = "uuid_utils-0.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:411e29b3a2de713c4a3f3edc653599fb17ef3f38b6a788fecef62c3f229b7b0e"}, + {file = "uuid_utils-0.7.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3bf10bd5a898d72f50183718ca18bd61b8830c9134469b4d7b9f73f176f06c9f"}, + {file = "uuid_utils-0.7.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:247af7258004f497ec927fcf463914df5447eb691d7e9c23528280c471d6e830"}, + {file = "uuid_utils-0.7.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:01f7c73860b3cef024f9f57515dae5d52a554c3d2480d8410174ec5b609e20f5"}, + {file = "uuid_utils-0.7.0-cp38-none-win32.whl", hash = "sha256:d90d432c85bb2d9b3d67c8483b1134cf4363a39fa3273b8f05dcfde2bdddfc5d"}, + {file = "uuid_utils-0.7.0-cp38-none-win_amd64.whl", hash = "sha256:d31ebe0e6d5d1210da259de4d04ee31dfd5407296302bc2dfcca941e3e8f7bee"}, + {file = "uuid_utils-0.7.0-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:076fe5f6e5295a5d47b240ece6047d25ce15e8a114f60acc51b4025c3b973ed9"}, + {file = "uuid_utils-0.7.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:997f4d4f505391b69373c852662b5fe0af8c17b71fe401fea7687261464b9aa5"}, + {file = "uuid_utils-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59fc7ce3dddb5694f6ecd427d557a342f44075cdaf836cd99033fd0cc500e592"}, + {file = "uuid_utils-0.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:463b98c24c5f6f4d0b46174c1068c19007fe6414c38fbd58d5cb6c8d29cdd1ef"}, + {file = "uuid_utils-0.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65c5d33fd056517d0ab1624168359371b012cc6e3a0fd6029d212d3973032e90"}, + {file = "uuid_utils-0.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da47c5c4348a5f88749ac8fd54715bdfa18c1317ebf709121721e9b5fb338c66"}, + {file = "uuid_utils-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04f39fd90656770422cc7ec46467c2eb758e19d70c5844770bd67834ebae40ea"}, + {file = "uuid_utils-0.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a5817e38d497ae643c68044c5c84153fa47557df1f8c1661c17bd1e26bda1058"}, + {file = "uuid_utils-0.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:407c15bbde425bc4df829771ef601260eda8617ac5adc6f1eb924d916674c34f"}, + {file = "uuid_utils-0.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d4ac00e7f3bbb578e20fadf81468f28b63d1b29930192d8285e9d01b2f75f270"}, + {file = "uuid_utils-0.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6d937d37b696a2e3346171367a6ecf69519af4f2a5325e8e7f9a7cfb61597387"}, + {file = "uuid_utils-0.7.0-cp39-none-win32.whl", hash = "sha256:a4fd826bc2c260716b53db90b2e4c8a0f752aae053fbfbd1860e6e450bcf6ae9"}, + {file = "uuid_utils-0.7.0-cp39-none-win_amd64.whl", hash = "sha256:c1aa084a1b4842c49526ed1189122a96a8cdd73f66ef4219956279044bf6721f"}, + {file = "uuid_utils-0.7.0-pp38-pypy38_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:15eb3621d24fb6aab7f8e7b315356171795ca0f226ba9c31490fb9c08712c201"}, + {file = "uuid_utils-0.7.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bd0dac47317dcdefafe493428237019582ba8adb91c3ec80e033ee631c173f6d"}, + {file = 
"uuid_utils-0.7.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7b555e485f17ab1ab0cb963ff48c6404b93dd491aef7f52a8ae8c52f7f51841"}, + {file = "uuid_utils-0.7.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e66bddd9a469645ede16f0abde5db4dd1a75bc9628ab0b68cad0b848de8494aa"}, + {file = "uuid_utils-0.7.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ad6957427be8f2e48d2f128b3382b3c8e33b4b26542d757e5957c9593773082"}, + {file = "uuid_utils-0.7.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c753f5b690a481d31f13668a57610a4ee9805d0bd4515ab74a3766bea3b0e66"}, + {file = "uuid_utils-0.7.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e99615eb01550e1f883b5b251a04e8afe053dd30fb6c1af823bd14841bd9290"}, + {file = "uuid_utils-0.7.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cb241970c10cccd37ecac5b3759276ca499cb5b639b832167f91b0a98383e89d"}, + {file = "uuid_utils-0.7.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:14b672b950e792545fde222cf08f9ba9e30ac69399c2ca34b91d4fa457ce1528"}, + {file = "uuid_utils-0.7.0-pp39-pypy39_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:1229b9849a239714899040f8af9c7b3b7ad790483ac0bdf06982eb03383e7a93"}, + {file = "uuid_utils-0.7.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2b6c56101e5dedf06c81c5f3e3dc9d542feb4a5443b01a100c14eef6ae7e9ec4"}, + {file = "uuid_utils-0.7.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77c7f5e54fad8d761e019122080b14fae9568dd09cbb908f349284efa8f9a792"}, + {file = "uuid_utils-0.7.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b429a906f0dff1c35d55ca17c5f7fedf3149cb405808b43ba4f3a6d21732c31"}, + {file = "uuid_utils-0.7.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:06608c7d643149dee92ceebc73a84bb736d4394f200ecb794541a79e10bc482d"}, + {file = "uuid_utils-0.7.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:655e505c4e7c321e7f60572fdd594bdfdd96556a9699f697045e3d0b4699f30a"}, + {file = "uuid_utils-0.7.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1e59447b45d5988572e450f43de5546e1d2f6643d2e0137d83b5fdad204fd05"}, + {file = "uuid_utils-0.7.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f3010bdaff5c2a78980849aa6b082e7a0013949c8e4d317934f4aaacf14a2d22"}, + {file = "uuid_utils-0.7.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ec25fadeeb34c41ef95a8b849a3e4dcc39e96eb39367323ba873bc1732d6516a"}, + {file = "uuid_utils-0.7.0.tar.gz", hash = "sha256:015aa22711ffd57c5001c2477c6a40121db2794ae3be181a0bf79eef80e28943"}, +] + [[package]] name = "virtualenv" -version = "20.25.1" +version = "20.26.2" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, - {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, + {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, + {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, ] 
[package.dependencies] @@ -2647,7 +2734,7 @@ filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] @@ -2663,26 +2750,25 @@ files = [ [[package]] name = "webargs" -version = "5.5.3" -description = "Declarative parsing and validation of HTTP request objects, with built-in support for popular web frameworks, including Flask, Django, Bottle, Tornado, Pyramid, webapp2, Falcon, and aiohttp." +version = "8.4.0" +description = "Declarative parsing and validation of HTTP request objects, with built-in support for popular web frameworks, including Flask, Django, Bottle, Tornado, Pyramid, Falcon, and aiohttp." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "webargs-5.5.3-py2-none-any.whl", hash = "sha256:fc81c9f9d391acfbce406a319217319fd8b2fd862f7fdb5319ad06944f36ed25"}, - {file = "webargs-5.5.3-py3-none-any.whl", hash = "sha256:4f04918864c7602886335d8099f9b8960ee698b6b914f022736ed50be6b71235"}, - {file = "webargs-5.5.3.tar.gz", hash = "sha256:871642a2e0c62f21d5b78f357750ac7a87e6bc734c972f633aa5fb6204fbf29a"}, + {file = "webargs-8.4.0-py3-none-any.whl", hash = "sha256:22324305fbca6a2c4cce1235280e8b56372fb3211a8dac2ac8ed1948315a6f53"}, + {file = "webargs-8.4.0.tar.gz", hash = "sha256:ea99368214a4ce613924be99d71db58c269631e95eff4fa09b7354e52dc006a5"}, ] [package.dependencies] -marshmallow = ">=2.15.2" -simplejson = ">=2.1.0" +marshmallow = ">=3.0.0" +packaging = "*" [package.extras] -dev = ["Django (>=1.11.16)", "Flask (>=0.12.2)", "aiohttp (>=3.0.0)", "bottle (>=0.12.13)", "falcon (>=1.4.0,<2.0)", "flake8 (==3.7.8)", "flake8-bugbear (==19.8.0)", "mock", "mypy (==0.730)", "pre-commit (>=1.17,<2.0)", "pyramid (>=1.9.1)", "pytest", "pytest-aiohttp (>=0.3.0)", "tornado (>=4.5.2)", "tox", "webapp2 (>=3.0.0b1)", "webtest (==2.0.33)", "webtest-aiohttp (==2.0.0)"] -docs = ["Django (>=1.11.16)", "Flask (>=0.12.2)", "Sphinx (==2.2.0)", "aiohttp (>=3.0.0)", "bottle (>=0.12.13)", "falcon (>=1.4.0,<2.0)", "pyramid (>=1.9.1)", "sphinx-issues (==1.2.0)", "sphinx-typlog-theme (==0.7.3)", "tornado (>=4.5.2)", "webapp2 (>=3.0.0b1)"] -frameworks = ["Django (>=1.11.16)", "Flask (>=0.12.2)", "aiohttp (>=3.0.0)", "bottle (>=0.12.13)", "falcon (>=1.4.0,<2.0)", "pyramid (>=1.9.1)", "tornado (>=4.5.2)", "webapp2 (>=3.0.0b1)"] -lint = ["flake8 (==3.7.8)", "flake8-bugbear (==19.8.0)", "mypy (==0.730)", "pre-commit (>=1.17,<2.0)"] -tests = ["Django (>=1.11.16)", "Flask (>=0.12.2)", "aiohttp (>=3.0.0)", "bottle (>=0.12.13)", "falcon (>=1.4.0,<2.0)", "mock", "pyramid (>=1.9.1)", "pytest", "pytest-aiohttp (>=0.3.0)", "tornado (>=4.5.2)", "webapp2 (>=3.0.0b1)", "webtest (==2.0.33)", "webtest-aiohttp (==2.0.0)"] +dev = ["Django (>=2.2.0)", "Flask (>=0.12.5)", "aiohttp (>=3.0.8)", "bottle (>=0.12.13)", "falcon (>=2.0.0)", "flake8 (==7.0.0)", "flake8-bugbear (==23.12.2)", "mypy (==1.8.0)", "pre-commit (>=2.4,<4.0)", "pyramid 
(>=1.9.1)", "pytest", "pytest-aiohttp (>=0.3.0)", "pytest-asyncio", "tornado (>=4.5.2)", "tox", "webtest (==3.0.0)", "webtest-aiohttp (==2.0.0)"] +docs = ["Django (>=2.2.0)", "Flask (>=0.12.5)", "Sphinx (==7.2.6)", "aiohttp (>=3.0.8)", "bottle (>=0.12.13)", "falcon (>=2.0.0)", "furo (==2023.9.10)", "pyramid (>=1.9.1)", "sphinx-issues (==3.0.1)", "tornado (>=4.5.2)"] +frameworks = ["Django (>=2.2.0)", "Flask (>=0.12.5)", "aiohttp (>=3.0.8)", "bottle (>=0.12.13)", "falcon (>=2.0.0)", "pyramid (>=1.9.1)", "tornado (>=4.5.2)"] +lint = ["flake8 (==7.0.0)", "flake8-bugbear (==23.12.2)", "mypy (==1.8.0)", "pre-commit (>=2.4,<4.0)"] +tests = ["Django (>=2.2.0)", "Flask (>=0.12.5)", "aiohttp (>=3.0.8)", "bottle (>=0.12.13)", "falcon (>=2.0.0)", "pyramid (>=1.9.1)", "pytest", "pytest-aiohttp (>=0.3.0)", "pytest-asyncio", "tornado (>=4.5.2)", "webtest (==3.0.0)", "webtest-aiohttp (==2.0.0)"] [[package]] name = "yarl" @@ -2789,25 +2875,24 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.17.0" +version = "3.19.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"}, + {file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [extras] askar = ["anoncreds", "aries-askar", "indy-credx", "indy-vdr"] bbs = ["ursa-bbs-signatures"] -indy = ["python3-indy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "6aef813877ebf9ed3259ae2dbe0efbe2a0b7f0192a2f3881f52336c4c05c8c8b" +content-hash = "ebf6f9d048937a52695115bf2aa1b2f71521116ccb5d9429f209aee041b91446" diff --git a/pyproject.toml b/pyproject.toml index f5b83f8c64..ec910e97ed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,23 +17,23 @@ repository = "https://github.com/hyperledger/aries-cloudagent-python" [tool.poetry.dependencies] python = "^3.9" aiohttp="~3.9.4" -aiohttp-apispec="~2.2.1" +aiohttp-apispec = { git = "https://github.com/ff137/aiohttp-apispec.git", tag = "v3.0.1" } aiohttp-cors="~0.7.0" -apispec="~3.3.0" +apispec="^6.6.0" async-timeout="~4.0.2" base58="~2.1.0" ConfigArgParse="~1.5.3" deepmerge="~0.3.0" -ecdsa="~0.16.1" +ecdsa="~0.19.0" jsonpath-ng="1.6.1" -Markdown="~3.5.2" +Markdown="~3.6" markupsafe="2.0.1" marshmallow="~3.20.1" -nest_asyncio="~1.5.5" +nest_asyncio="~1.6.0" packaging="~23.1" -portalocker="~2.7.0" +portalocker="~2.8.2" 
prompt_toolkit=">=2.0.9,<2.1.0" -pydid="^0.4.3" +pydid="^0.5.0" pyjwt="~2.8.0" pyld="^2.0.4" pynacl="~1.5.0" @@ -43,11 +43,12 @@ pytz="~2021.1" pyyaml="~6.0.1" qrcode = {version = ">=6.1,<7.0", extras = ["pil"]} requests="~2.31.0" -rlp="4.0.0" +rlp="4.0.1" unflatten="~0.1" sd-jwt = "^0.10.3" did-peer-2 = "^0.1.2" did-peer-4 = "^0.1.4" +uuid_utils = "^0.7.0" # askar aries-askar= { version = "~0.3.0", optional = true } @@ -58,16 +59,13 @@ anoncreds= { version = "0.2.0", optional = true } # bbs ursa-bbs-signatures= { version = "~1.0.1", optional = true } -# indy -python3-indy= { version = "^1.11.1", optional = true } - [tool.poetry.group.dev.dependencies] -pre-commit="~3.3.3" +pre-commit = "~3.7.0" # Sync with version in .pre-commit-config.yaml -ruff = "0.1.2" +ruff = "0.4.4" # Sync with version in .github/workflows/blackformat.yml # Sync with version in .pre-commit-config.yaml -black="24.3.0" +black = "24.4.2" sphinx="1.8.4" sphinx-rtd-theme=">=0.4.3" @@ -78,11 +76,10 @@ pydevd="1.5.1" pydevd-pycharm="~193.6015.39" # testing -pytest= "^8.0.0" -pytest-asyncio= "^0.23.5" -pytest-cov= "4.1.0" -pytest-ruff="^0.1.1" -mock= "~4.0" +pytest = "^8.2.0" +pytest-asyncio = "^0.23.6" +pytest-cov = "^5.0.0" +pytest-ruff = "^0.3.2" [tool.poetry.extras] askar = [ @@ -94,17 +91,14 @@ askar = [ bbs = [ "ursa-bbs-signatures" ] -indy = [ - "python3-indy" -] [tool.poetry.scripts] aca-py = "aries_cloudagent.__main__:script_main" [tool.ruff] -select = ["B006", "C", "D", "E", "F"] +lint.select = ["B006", "C", "D", "E", "F"] -ignore = [ +lint.ignore = [ # Google Python Doc Style "D203", "D204", "D213", "D215", "D400", "D401", "D404", "D406", "D407", "D408", "D409", "D413", @@ -118,7 +112,7 @@ include = ["aries_cloudagent/**/*.py"] line-length = 90 -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "**/{tests}/*" = ["B006", "D", "E501", "F841"] [tool.pytest.ini_options] @@ -139,9 +133,6 @@ markers = [ ] junit_family = "xunit1" asyncio_mode = "auto" -filterwarnings = [ - 'ignore:distutils Version classes are deprecated. Use packaging.version instead.:DeprecationWarning', # Ignore specific DeprecationWarning for old packages using distutils version class -] [tool.coverage.run] diff --git a/scripts/run_tests_indy b/scripts/run_tests_indy deleted file mode 100755 index 37fab8d5e7..0000000000 --- a/scripts/run_tests_indy +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash - -cd "$(dirname "$0")" || exit -CONTAINER_RUNTIME="${CONTAINER_RUNTIME:-docker}" - -DOCKER_BUILDKIT=1 $CONTAINER_RUNTIME build \ - -t aries-cloudagent-test \ - -f ../docker/Dockerfile.indy \ - --target acapy-test .. \ - --build-arg indy_version=1.16.0 \ - || exit 1 - -if [ ! 
-d ../test-reports ]; then mkdir ../test-reports; fi - -# on Windows, docker run needs to be prefixed by winpty -if [ "$OSTYPE" == "msys" ]; then - CONTAINER_RUNTIME="winpty docker" -fi -if [ -z "$DOCKER_NET" ]; then - DOCKER_NET="bridge" -fi - -if [ -z "$POSTGRES_URL" ]; then - if [ -n "$($CONTAINER_RUNTIME ps --filter name=indy-demo-postgres --quiet)" ]; then - DOCKER_ARGS="$DOCKER_ARGS --link indy-demo-postgres" - POSTGRES_URL="indy-demo-postgres" - fi -fi -if [ -n "$POSTGRES_URL" ]; then - DOCKER_ARGS="$DOCKER_ARGS -e POSTGRES_URL=$POSTGRES_URL" -fi - -$CONTAINER_RUNTIME run --rm -ti --name aries-cloudagent-runner \ - --network=${DOCKER_NET} \ - -v "$(pwd)/../test-reports:/home/indy/src/app/test-reports" \ - $DOCKER_ARGS \ - aries-cloudagent-test "$@" diff --git a/sonar-project.properties b/sonar-project.properties new file mode 100644 index 0000000000..03ad4b3b74 --- /dev/null +++ b/sonar-project.properties @@ -0,0 +1,5 @@ +sonar.projectKey=hyperledger_aries-cloudagent-python +sonar.organization=hyperledger +sonar.projectName=aries-cloudagent-python + +sonar.python.version=3.9 \ No newline at end of file
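
For orientation, the pyproject.toml edits and the regenerated poetry.lock above are the two halves of one standard Poetry operation: change the dependency table, then relock so the recorded content-hash matches. A minimal sketch of that workflow, using only stock Poetry commands — the exact invocations below are an illustration, not taken from this repository's scripts:

    # Regenerate poetry.lock after editing pyproject.toml; this recomputes
    # the content-hash stored under [metadata] in the lock file (the value
    # that changed in the diff above).
    poetry lock

    # Install the project plus optional extras. The "indy" extra is removed
    # in this change, so only "askar" and "bbs" remain installable:
    poetry install --extras askar --extras bbs

The accompanying [tool.ruff] edits follow the same pattern: newer Ruff releases moved lint configuration under the lint. namespace, so select/ignore become lint.select/lint.ignore and per-file ignores move to [tool.ruff.lint.per-file-ignores], as shown in the pyproject.toml hunk.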