From 80d797cfa980612db784f0832a54a3147abc5ad6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Apr 2025 12:37:01 +0000 Subject: [PATCH 01/32] chore(deps): Bump tj-actions/changed-files in the all-actions group (#3648) --- .github/workflows/bdd-integration-tests.yml | 2 +- .github/workflows/bdd-interop-tests.yml | 2 +- .github/workflows/scenario-integration-tests.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/bdd-integration-tests.yml b/.github/workflows/bdd-integration-tests.yml index dab42ab1ac..b36dc0544e 100644 --- a/.github/workflows/bdd-integration-tests.yml +++ b/.github/workflows/bdd-integration-tests.yml @@ -31,7 +31,7 @@ jobs: fetch-depth: 0 - name: Check changed files id: check-changed-files - uses: tj-actions/changed-files@v46.0.4 + uses: tj-actions/changed-files@v46.0.5 with: files_yaml: | src: diff --git a/.github/workflows/bdd-interop-tests.yml b/.github/workflows/bdd-interop-tests.yml index c3758d004c..0554421a9f 100644 --- a/.github/workflows/bdd-interop-tests.yml +++ b/.github/workflows/bdd-interop-tests.yml @@ -31,7 +31,7 @@ jobs: fetch-depth: 0 - name: Check changed files id: check-changed-files - uses: tj-actions/changed-files@v46.0.4 + uses: tj-actions/changed-files@v46.0.5 with: files_yaml: | src: diff --git a/.github/workflows/scenario-integration-tests.yml b/.github/workflows/scenario-integration-tests.yml index 4e9c6c7a01..115acb1e4c 100644 --- a/.github/workflows/scenario-integration-tests.yml +++ b/.github/workflows/scenario-integration-tests.yml @@ -29,7 +29,7 @@ jobs: fetch-depth: 0 - name: Check changed files id: check-changed-files - uses: tj-actions/changed-files@v46.0.4 + uses: tj-actions/changed-files@v46.0.5 with: files_yaml: | scenarios: "scenarios/**/*" From 23d9b9281a99fc4cda99edda8c633f3d119106da Mon Sep 17 00:00:00 2001 From: Robbie Blaine Date: Tue, 15 Apr 2025 11:45:57 +0200 Subject: [PATCH 02/32] 
:construction_worker: Fix Docker Caching * Instead of caching to the Runner FS and then uploading it, use native Docker caching to Github Actions * Run and fix `zizmor` warnings * Disable persisting git credentials * Disable caching of Buildx binary * Replace unnecessary "Gather image info" step with Github's built in `${{ github.repository_owner }}` function * Update `docker/build-push-action` from `v5` to `v6` Signed-off-by: Robbie Blaine --- .github/workflows/publish.yml | 40 ++++++++++------------------------- 1 file changed, 11 insertions(+), 29 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 2367b945c8..515d9736ec 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -54,22 +54,12 @@ jobs: uses: actions/checkout@v4 with: ref: ${{ inputs.ref || '' }} - - - name: Gather image info - id: info - run: | - echo "repo-owner=${GITHUB_REPOSITORY_OWNER,,}" >> $GITHUB_OUTPUT - - - name: Cache Docker layers - uses: actions/cache@v4 - with: - path: /tmp/.buildx-cache - key: ${{ runner.os }}-buildx-${{ github.sha }} - restore-keys: | - ${{ runner.os }}-buildx- + persist-credentials: false - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 + with: + cache-binary: false - name: Log in to the GitHub Container Registry uses: docker/login-action@v3 @@ -83,12 +73,12 @@ jobs: uses: docker/metadata-action@v5 with: images: | - ghcr.io/${{ steps.info.outputs.repo-owner }}/acapy-agent + ghcr.io/${{ github.repository_owner }}/acapy-agent tags: | type=raw,value=py${{ matrix.python-version }}-${{ inputs.tag || github.event.release.tag_name }} - name: Build and Push Image to ghcr.io - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: push: true context: . 
@@ -99,8 +89,8 @@ jobs: build-args: | python_version=${{ matrix.python-version }} acapy_version=${{ inputs.tag || github.event.release.tag_name }} - cache-from: type=local,src=/tmp/.buildx-cache - cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max + cache-from: type=gha,scope=acapy-agent + cache-to: type=gha,scope=acapy-agent,mode=max platforms: ${{ env.PLATFORMS }} - name: Setup Image Metadata (BBS) @@ -108,12 +98,12 @@ jobs: uses: docker/metadata-action@v5 with: images: | - ghcr.io/${{ steps.info.outputs.repo-owner }}/acapy-agent-bbs + ghcr.io/${{ github.repository_owner }}/acapy-agent-bbs tags: | type=raw,value=py${{ matrix.python-version }}-${{ inputs.tag || github.event.release.tag_name }} - name: Build and Push extended Image to ghcr.io - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: push: true context: . @@ -126,16 +116,8 @@ jobs: acapy_name=acapy-agent-bbs acapy_version=${{ inputs.tag || github.event.release.tag_name }} acapy_reqs=[askar,bbs,didcommv2] - cache-from: type=local,src=/tmp/.buildx-cache - cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max + cache-from: type=gha,scope=acapy-agent-bbs + cache-to: type=gha,scope=acapy-agent-bbs,mode=max # Because of BBS, only linux/amd64 is supported for the extended image # https://github.com/openwallet-foundation/acapy/issues/2124#issuecomment-2293569659 platforms: linux/amd64 - - # Temp fix - # https://github.com/docker/build-push-action/issues/252 - # https://github.com/moby/buildkit/issues/1896 - - name: Move cache - run: | - rm -rf /tmp/.buildx-cache - mv /tmp/.buildx-cache-new /tmp/.buildx-cache From 27bfea8cbbb0e35bdc1069b839995ecbf49a535e Mon Sep 17 00:00:00 2001 From: Robbie Blaine Date: Tue, 15 Apr 2025 16:15:13 +0200 Subject: [PATCH 03/32] :construction_worker: Ensure repo owner is lowercased for image name Signed-off-by: Robbie Blaine --- .github/workflows/publish.yml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git 
a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 515d9736ec..469c74f4b4 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -68,12 +68,17 @@ jobs: username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} + # Ensure the repo owner is lowercase for the image name + - name: Lowercase Repo Owner + id: lower + run: echo "owner=${GITHUB_REPOSITORY_OWNER,,}" >> $GITHUB_OUTPUT + - name: Setup Image Metadata id: meta uses: docker/metadata-action@v5 with: images: | - ghcr.io/${{ github.repository_owner }}/acapy-agent + ghcr.io/${{ steps.lower.outputs.owner }}/acapy-agent tags: | type=raw,value=py${{ matrix.python-version }}-${{ inputs.tag || github.event.release.tag_name }} @@ -98,7 +103,7 @@ jobs: uses: docker/metadata-action@v5 with: images: | - ghcr.io/${{ github.repository_owner }}/acapy-agent-bbs + ghcr.io/${{ steps.lower.outputs.owner }}/acapy-agent-bbs tags: | type=raw,value=py${{ matrix.python-version }}-${{ inputs.tag || github.event.release.tag_name }} From 090747f9ec0323522886b17f73566e5f0f8a5bef Mon Sep 17 00:00:00 2001 From: Robbie Blaine Date: Tue, 15 Apr 2025 16:55:42 +0200 Subject: [PATCH 04/32] :construction_worker: Split Docker Builds * Build Standard image on native CPU architectures * `linux/arm64` builds on `ubuntu-24.04-arm` (`arm64`) * `linux/amd64` builds on `ubuntu-24.04` (`x86_64`) * Split build and publish * Build docker images on native runners, don't push, cache to GHA * Combine multi-arch, read from cache, push to GHCR.io * Due to a version mismatch in buildx between `ubuntu-24.04` and `ubuntu-24.04-arm`, have to ensure the latest version of `buildx` is installed * Due to how the platforms are being handled, I've removed the `platforms` input to the workflow Signed-off-by: Robbie Blaine --- .github/workflows/nightly.yml | 1 - .github/workflows/publish.yml | 130 ++++++++++++++++++++-------------- 2 files changed, 76 insertions(+), 55 deletions(-) diff --git 
a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index cee86d04d4..081109d34d 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -56,4 +56,3 @@ jobs: tag: ["nightly-${{needs.setup_and_check_pub.outputs.date}}", nightly] with: tag: ${{ matrix.tag }} - platforms: "linux/amd64,linux/arm64" diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 469c74f4b4..6cc46000ce 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -7,16 +7,11 @@ on: workflow_dispatch: inputs: tag: - description: 'Image tag' + description: Image tag required: true type: string - platforms: - description: 'Platforms - Comma separated list of the platforms to support.' - required: true - default: linux/amd64,linux/arm64 - type: string ref: - description: 'Optional - The branch, tag or SHA to checkout.' + description: Optional - The branch, tag or SHA to checkout. required: false type: string workflow_call: @@ -24,31 +19,81 @@ on: tag: required: true type: string - platforms: - required: true - default: linux/amd64,linux/arm64 - type: string ref: required: false type: string -env: - # linux/386 platform support has been disabled pending a permanent fix for https://github.com/openwallet-foundation/acapy/issues/2124 - # PLATFORMS: ${{ inputs.platforms || 'linux/amd64,linux/arm64,linux/386' }} - PLATFORMS: ${{ inputs.platforms || 'linux/amd64,linux/arm64' }} - jobs: - publish-image: + build-image: strategy: fail-fast: false matrix: - python-version: ['3.12'] + python-version: ["3.12"] + arch: ["amd64", "arm64"] + include: + - arch: amd64 + runs-on: ubuntu-24.04 + - arch: arm64 + runs-on: ubuntu-24.04-arm - name: Publish ACA-Py Image + name: Build ACA-Py Image + runs-on: ${{ matrix.runs-on }} + permissions: + contents: read + packages: write + steps: + - name: Checkout Code + uses: actions/checkout@v4 + with: + ref: ${{ inputs.ref || '' }} + persist-credentials: false + + - name: Set up Docker Buildx + 
uses: docker/setup-buildx-action@v3 + with: + cache-binary: false + install: true + version: latest + + - name: Build and Cache Image + uses: docker/build-push-action@v6 + with: + push: false + context: . + file: docker/Dockerfile + build-args: | + python_version=${{ matrix.python-version }} + acapy_version=${{ inputs.tag || github.event.release.tag_name }} + cache-from: type=gha,scope=acapy-agent-${{ matrix.arch }} + cache-to: type=gha,scope=acapy-agent-${{ matrix.arch }},mode=max + platforms: linux/${{ matrix.arch }} + + publish-images: + strategy: + fail-fast: false + matrix: + python-version: ["3.12"] + image-type: ["standard", "bbs"] + include: + - image-type: standard + image-name: acapy-agent + # linux/386 platform support has been disabled pending a permanent fix for https://github.com/openwallet-foundation/acapy/issues/2124 + # platforms: linux/amd64,linux/arm64,linux/386 + platforms: linux/amd64,linux/arm64 + acapy-reqs: "" + - image-type: bbs + image-name: acapy-agent-bbs + platforms: linux/amd64 + acapy-reqs: "[askar,bbs,didcommv2]" + + name: Publish ACA-Py ${{ matrix.image-type == 'bbs' && 'BBS ' || '' }} Image runs-on: ubuntu-latest permissions: contents: read packages: write + + needs: build-image + steps: - name: Checkout Code uses: actions/checkout@v4 @@ -60,6 +105,8 @@ jobs: uses: docker/setup-buildx-action@v3 with: cache-binary: false + install: true + version: latest - name: Log in to the GitHub Container Registry uses: docker/login-action@v3 @@ -78,11 +125,11 @@ jobs: uses: docker/metadata-action@v5 with: images: | - ghcr.io/${{ steps.lower.outputs.owner }}/acapy-agent + ghcr.io/${{ steps.lower.outputs.owner }}/${{ matrix.image-name }} tags: | type=raw,value=py${{ matrix.python-version }}-${{ inputs.tag || github.event.release.tag_name }} - - name: Build and Push Image to ghcr.io + - name: Publish Image to GHCR.io uses: docker/build-push-action@v6 with: push: true @@ -90,39 +137,14 @@ jobs: file: docker/Dockerfile tags: ${{ 
steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - target: main - build-args: | - python_version=${{ matrix.python-version }} - acapy_version=${{ inputs.tag || github.event.release.tag_name }} - cache-from: type=gha,scope=acapy-agent - cache-to: type=gha,scope=acapy-agent,mode=max - platforms: ${{ env.PLATFORMS }} - - - name: Setup Image Metadata (BBS) - id: meta-bbs - uses: docker/metadata-action@v5 - with: - images: | - ghcr.io/${{ steps.lower.outputs.owner }}/acapy-agent-bbs - tags: | - type=raw,value=py${{ matrix.python-version }}-${{ inputs.tag || github.event.release.tag_name }} - - - name: Build and Push extended Image to ghcr.io - uses: docker/build-push-action@v6 - with: - push: true - context: . - file: docker/Dockerfile - tags: ${{ steps.meta-bbs.outputs.tags }} - labels: ${{ steps.meta-bbs.outputs.labels }} - target: main build-args: | python_version=${{ matrix.python-version }} - acapy_name=acapy-agent-bbs acapy_version=${{ inputs.tag || github.event.release.tag_name }} - acapy_reqs=[askar,bbs,didcommv2] - cache-from: type=gha,scope=acapy-agent-bbs - cache-to: type=gha,scope=acapy-agent-bbs,mode=max - # Because of BBS, only linux/amd64 is supported for the extended image - # https://github.com/openwallet-foundation/acapy/issues/2124#issuecomment-2293569659 - platforms: linux/amd64 + ${{ matrix.image-type == 'bbs' && 'acapy_name=acapy-agent-bbs' || '' }} + ${{ matrix.acapy-reqs != '' && format('acapy_reqs={0}', matrix.acapy-reqs) || '' }} + cache-from: | + ${{ matrix.image-type == 'standard' && 'type=gha,scope=acapy-agent-arm64' || '' }} + ${{ matrix.image-type == 'standard' && 'type=gha,scope=acapy-agent-amd64' || '' }} + ${{ matrix.image-type == 'bbs' && 'type=gha,scope=acapy-agent-bbs' || ''}} + cache-to: ${{ matrix.image-type == 'bbs' && 'type=gha,scope=acapy-agent-bbs,mode=max' || '' }} + platforms: ${{ matrix.platforms }} From 7f98526114264ffb37402129895dc6c4bf9516e8 Mon Sep 17 00:00:00 2001 From: Robbie Blaine Date: Tue, 15 Apr 
2025 17:16:51 +0200 Subject: [PATCH 05/32] :memo: Re-add comment about BBS and `linux/amd64` Signed-off-by: Robbie Blaine --- .github/workflows/publish.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 6cc46000ce..d059def99b 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -83,6 +83,8 @@ jobs: acapy-reqs: "" - image-type: bbs image-name: acapy-agent-bbs + # Because of BBS, only linux/amd64 is supported for the extended image + # https://github.com/openwallet-foundation/acapy/issues/2124#issuecomment-2293569659 platforms: linux/amd64 acapy-reqs: "[askar,bbs,didcommv2]" From 3e4b8e1fb225f1cf7288f70eb4711fb3de34372e Mon Sep 17 00:00:00 2001 From: Mourits de Beer <31511766+ff137@users.noreply.github.com> Date: Wed, 16 Apr 2025 12:35:52 +0200 Subject: [PATCH 06/32] :bug: Fix public did no longer being correctly configured (#3646) * :art: Add space to log Signed-off-by: ff137 * :art: Remove unnecessary method (only used once) Signed-off-by: ff137 * :art: Remove unused arg Signed-off-by: ff137 * :art: Remove f-string formatting from logs; fix string concatenation Signed-off-by: ff137 * :bug: Add missing return statement Signed-off-by: ff137 * :art: More removing f-string format from logs Signed-off-by: ff137 * :art: Clarify log Signed-off-by: ff137 * :bug: Fix: return public_did_info. Resolves #3645 Signed-off-by: ff137 * :art: Clean up `_initialize_with_public_did` method. 
Rename to `_replace_public_did_if_seed_mismatch` for clarity, and simplify logic Signed-off-by: ff137 * :art: None safety Signed-off-by: ff137 * :art: Remove extra space Signed-off-by: ff137 * :art: Remove log method and just call directly Signed-off-by: ff137 * :bug: Add missing return statement Signed-off-by: ff137 * :white_check_mark: Expand test coverage for wallet config Signed-off-by: ff137 --------- Signed-off-by: ff137 --- acapy_agent/config/tests/test_wallet.py | 40 +++++++-- acapy_agent/config/wallet.py | 111 +++++++++++++----------- acapy_agent/core/conductor.py | 26 +++--- 3 files changed, 109 insertions(+), 68 deletions(-) diff --git a/acapy_agent/config/tests/test_wallet.py b/acapy_agent/config/tests/test_wallet.py index 37d35d0025..f82fd152c2 100644 --- a/acapy_agent/config/tests/test_wallet.py +++ b/acapy_agent/config/tests/test_wallet.py @@ -154,7 +154,14 @@ async def test_wallet_config_auto_provision(self): await test_module.wallet_config(self.context, provision=False) self.context.update_settings({"auto_provision": True}) - await test_module.wallet_config(self.context, provision=False) + profile, did_info = await test_module.wallet_config( + self.context, provision=False + ) + + self.assertEqual(profile, self.profile) + self.assertIsNotNone(did_info) + self.assertEqual(did_info.did, TEST_DID) + self.assertEqual(did_info.verkey, TEST_VERKEY) async def test_wallet_config_non_indy_x(self): self.context.update_settings( @@ -227,7 +234,14 @@ async def test_wallet_config_seed_local(self): ): mock_seed_to_did.return_value = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" - await test_module.wallet_config(self.context, provision=True) + profile, did_info = await test_module.wallet_config( + self.context, provision=True + ) + + self.assertEqual(profile, self.profile) + self.assertIsNotNone(did_info) + self.assertEqual(did_info.did, TEST_DID) + self.assertEqual(did_info.verkey, TEST_VERKEY) async def test_wallet_config_seed_public(self): self.context.update_settings( 
@@ -254,9 +268,17 @@ async def test_wallet_config_seed_public(self): test_module, "add_or_update_version_to_storage", mock.CoroutineMock() ), ): - await test_module.wallet_config(self.context, provision=True) + profile, did_info = await test_module.wallet_config( + self.context, provision=True + ) + + self.assertEqual(profile, self.profile) + self.assertIsNotNone(did_info) + self.assertEqual(did_info.did, TEST_DID) + self.assertEqual(did_info.verkey, TEST_VERKEY) async def test_wallet_config_seed_no_public_did(self): + self.context.update_settings({"wallet.seed": "original_seed"}) mock_wallet = mock.MagicMock( get_public_did=mock.CoroutineMock(return_value=None), set_public_did=mock.CoroutineMock(), @@ -267,16 +289,18 @@ async def test_wallet_config_seed_no_public_did(self): self.injector.bind_instance(BaseWallet, mock_wallet) with ( - mock.patch.object( - test_module, "seed_to_did", mock.MagicMock() - ) as mock_seed_to_did, mock.patch.object( test_module, "add_or_update_version_to_storage", mock.CoroutineMock() ), ): - mock_seed_to_did.return_value = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" + profile, did_info = await test_module.wallet_config( + self.context, provision=True + ) - await test_module.wallet_config(self.context, provision=True) + self.assertEqual(profile, self.profile) + self.assertIsNotNone(did_info) + self.assertEqual(did_info.did, TEST_DID) + self.assertEqual(did_info.verkey, TEST_VERKEY) async def test_wallet_config_for_key_derivation_method(self): self.context.update_settings( diff --git a/acapy_agent/config/wallet.py b/acapy_agent/config/wallet.py index db5a3b662a..2887701d92 100644 --- a/acapy_agent/config/wallet.py +++ b/acapy_agent/config/wallet.py @@ -68,41 +68,51 @@ async def _attempt_open_profile( return (profile, provision) -def _log_provision_info(profile: Profile) -> None: - LOGGER.info( - "Created new profile - " - if profile.created - else "Opened existing profile - " - f"Profile name: {profile.name}, backend: {profile.backend}" - ) - - 
-async def _initialize_with_public_did( +async def _replace_public_did_if_seed_mismatch( public_did_info: DIDInfo, wallet: BaseWallet, settings: dict, wallet_seed: str, -) -> str: +) -> DIDInfo: + """Replace the public DID if a seed is provided and doesn't match the current DID. + + Args: + public_did_info: Current public DID info + wallet: Wallet instance + settings: Configuration settings + wallet_seed: Optional seed to check against current DID + + Returns: + DIDInfo: Either the original DID info or a new one if replaced + """ + if not wallet_seed: + return public_did_info + public_did = public_did_info.did - # Check did:sov seed matches public DID - if wallet_seed and (seed_to_did(wallet_seed) != public_did): - if not settings.get("wallet.replace_public_did"): - raise ConfigError( - "New seed provided which doesn't match the registered" - + f" public did {public_did}" - ) + if seed_to_did(wallet_seed) == public_did: + return public_did_info - LOGGER.info("Replacing public DID due to --replace-public-did flag") - replace_did_info = await wallet.create_local_did( - method=SOV, key_type=ED25519, seed=wallet_seed - ) - public_did = replace_did_info.did - await wallet.set_public_did(public_did) - LOGGER.info( - f"Created new public DID: {public_did}, " - f"with verkey: {replace_did_info.verkey}" + if not settings.get("wallet.replace_public_did"): + raise ConfigError( + "New seed provided which doesn't match the registered " + f"public did {public_did}" ) + LOGGER.info( + "Replacing public DID which doesn't match the seed " + "(as configured by --replace-public-did)" + ) + replace_did_info = await wallet.create_local_did( + method=SOV, key_type=ED25519, seed=wallet_seed + ) + await wallet.set_public_did(replace_did_info.did) + LOGGER.info( + "Created new public DID: %s, with verkey: %s", + replace_did_info.did, + replace_did_info.verkey, + ) + return replace_did_info + async def _initialize_with_debug_settings(settings: dict, wallet: BaseWallet): test_seed = 
settings.get("debug.seed") @@ -118,32 +128,28 @@ async def _initialize_with_debug_settings(settings: dict, wallet: BaseWallet): async def _initialize_with_seed( - settings: dict, wallet: BaseWallet, provision: bool, create_local_did: bool, seed: str -): - def _log_did_info(did: str, verkey: str, is_public: bool): - LOGGER.info( - f"Created new {'public' if is_public else 'local'}" - f"DID: {did}, Verkey: {verkey}" - ) - + settings: dict, wallet: BaseWallet, create_local_did: bool, seed: str +) -> DIDInfo: if create_local_did: endpoint = settings.get("default_endpoint") metadata = {"endpoint": endpoint} if endpoint else None - local_did_info = await wallet.create_local_did( + did_info = await wallet.create_local_did( method=SOV, key_type=ED25519, seed=seed, metadata=metadata, ) - local_did = local_did_info.did - _log_did_info(local_did, local_did_info.verkey, False) else: - public_did_info = await wallet.create_public_did( - method=SOV, key_type=ED25519, seed=seed - ) - public_did = public_did_info.did - _log_did_info(public_did, public_did_info.verkey, True) + did_info = await wallet.create_public_did(method=SOV, key_type=ED25519, seed=seed) + + LOGGER.info( + "Created new %s DID: %s, Verkey: %s", + "local" if create_local_did else "public", + did_info.did, + did_info.verkey, + ) + return did_info async def wallet_config( @@ -165,24 +171,31 @@ async def wallet_config( profile_manager, context, profile_config, settings ) - _log_provision_info(profile) + LOGGER.info( + "%s Profile name: %s, backend: %s", + "Created new profile -" if profile.created else "Opened existing profile -", + profile.name, + profile.backend, + ) txn = await profile.transaction() wallet = txn.inject(BaseWallet) public_did_info = await wallet.get_public_did() - public_did = None if public_did_info: - public_did = await _initialize_with_public_did( + # Check if we need to replace the public DID due to seed mismatch + public_did_info = await _replace_public_did_if_seed_mismatch( public_did_info, 
wallet, settings, wallet_seed ) elif wallet_seed: - await _initialize_with_seed( - settings, wallet, provision, create_local_did, wallet_seed + # Create new public DID from seed if none exists + public_did_info = await _initialize_with_seed( + settings, wallet, create_local_did, wallet_seed ) + public_did = public_did_info.did if public_did_info else None if provision and not create_local_did and not public_did: - LOGGER.info("No public DID") + LOGGER.info("No public DID created") await _initialize_with_debug_settings(settings, wallet) diff --git a/acapy_agent/core/conductor.py b/acapy_agent/core/conductor.py index 6b7623b361..1b5f7c9b23 100644 --- a/acapy_agent/core/conductor.py +++ b/acapy_agent/core/conductor.py @@ -395,8 +395,8 @@ async def start(self) -> None: ) from_version_storage = record.value LOGGER.info( - "Existing acapy_version storage record found, " - f"version set to {from_version_storage}" + "Existing acapy_version storage record found, version set to %s", + from_version_storage, ) except StorageNotFoundError: LOGGER.warning("Wallet version storage record not found.") @@ -432,8 +432,8 @@ async def start(self) -> None: LOGGER.warning( ( "No upgrade from version was found from wallet or via" - " --from-version startup argument. Defaulting to " - f"{DEFAULT_ACAPY_VERSION}." + " --from-version startup argument. 
Defaulting to %s.", + DEFAULT_ACAPY_VERSION, ) ) from_version = DEFAULT_ACAPY_VERSION @@ -469,9 +469,12 @@ async def start(self) -> None: ) LOGGER.info( "Created static connection for test suite\n" - f" - My DID: {test_conn.my_did}\n" - f" - Their DID: {test_conn.their_did}\n" - f" - Their endpoint: {their_endpoint}\n" + " - My DID: %s\n" + " - Their DID: %s\n" + " - Their endpoint: %s\n", + test_conn.my_did, + test_conn.their_did, + their_endpoint, ) del mgr LOGGER.debug("Static connection for test suite created and manager deleted.") @@ -490,7 +493,7 @@ async def start(self) -> None: mediation_mgr = MediationManager(self.root_profile) try: await mediation_mgr.set_default_mediator_by_id(default_mediator_id) - LOGGER.info(f"Default mediator set to {default_mediator_id}") + LOGGER.info("Default mediator set to %s", default_mediator_id) except Exception: LOGGER.exception("Error updating default mediator.") @@ -512,7 +515,7 @@ async def start(self) -> None: ) base_url = context.settings.get("invite_base_url") invite_url = invi_rec.invitation.to_url(base_url) - LOGGER.info(f"Invitation URL:\n{invite_url}") + LOGGER.info("Invitation URL:\n%s", invite_url) qr = QRCode(border=1) qr.add_data(invite_url) qr.print_ascii(invert=True) @@ -880,7 +883,8 @@ async def check_for_valid_wallet_type(self, profile): if acapy_version: storage_type_from_storage = STORAGE_TYPE_VALUE_ASKAR LOGGER.info( - f"Existing agent found. Setting wallet type to {storage_type_from_storage}." # noqa: E501 + "Existing agent found. Setting wallet type to %s.", + storage_type_from_storage, ) await storage.add_record( StorageRecord( @@ -891,7 +895,7 @@ async def check_for_valid_wallet_type(self, profile): else: storage_type_from_storage = storage_type_from_config LOGGER.info( - f"New agent. Setting wallet type to {storage_type_from_config}." + "New agent. 
Setting wallet type to %s.", storage_type_from_config ) await storage.add_record( StorageRecord( From 50e19c246858a282e2fa4cc7fe1d7792b9cc84cd Mon Sep 17 00:00:00 2001 From: Robbie Blaine <4052340+rblaine95@users.noreply.github.com> Date: Wed, 16 Apr 2025 15:05:30 +0200 Subject: [PATCH 07/32] :construction_worker: Optimize Dockerfile to reduce cache invalidation (#3655) Signed-off-by: Robbie Blaine --- docker/Dockerfile | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 5f3c99e5cc..8ee54ad55f 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,12 +1,15 @@ ARG python_version=3.12 FROM python:${python_version}-slim-bookworm AS build +RUN pip install --no-cache-dir poetry==2.1.1 + WORKDIR /src -COPY ./acapy_agent ./acapy_agent -COPY ./pyproject.toml ./poetry.lock ./README.md ./ +COPY ./pyproject.toml ./poetry.lock ./ +RUN poetry install --no-root -RUN pip install --no-cache-dir poetry==2.1.1 +COPY ./acapy_agent ./acapy_agent +COPY ./README.md /src RUN poetry build FROM python:${python_version}-slim-bookworm AS main @@ -18,7 +21,7 @@ ARG acapy_version ARG acapy_reqs=[didcommv2] ENV HOME="/home/$user" \ - APP_ROOT="$HOME" \ + APP_ROOT="/home/$user" \ LC_ALL=C.UTF-8 \ LANG=C.UTF-8 \ PIP_NO_CACHE_DIR=off \ @@ -42,7 +45,7 @@ LABEL summary="$SUMMARY" \ RUN useradd -U -ms /bin/bash -u $uid $user # Install environment -RUN apt-get update -y && \ +RUN apt-get update && \ apt-get install -y --no-install-recommends \ apt-transport-https \ ca-certificates \ @@ -55,9 +58,10 @@ RUN apt-get update -y && \ openssl \ sqlite3 \ zlib1g && \ + apt-get autopurge -y && \ + apt-get clean -y && \ rm -rf /var/lib/apt/lists/* /usr/share/doc/* - WORKDIR $HOME # Add local binaries and aliases to path @@ -95,10 +99,4 @@ RUN acapy_agent_package=$(find ./ -name "acapy_agent*.whl" | head -n 1) && \ rm acapy_agent*.whl && \ chmod +rx $(python -m site --user-site) $HOME/.local -# Clean-up unnecessary build 
dependencies and reduce final image size -USER root -RUN apt-get purge -y - -USER $user - ENTRYPOINT ["aca-py"] From 10d2d2bd71ea006c7492c24bd34402761d6f839d Mon Sep 17 00:00:00 2001 From: Mourits de Beer <31511766+ff137@users.noreply.github.com> Date: Wed, 16 Apr 2025 15:50:46 +0200 Subject: [PATCH 08/32] :art: Fix swagger tag names for AnonCreds endpoints (#3661) * :art: Use revocation_anoncreds tag title Signed-off-by: ff137 * :art: Deduplicate tag titles Signed-off-by: ff137 * :art: Deduplicate tag titles Signed-off-by: ff137 * :art: Avoid circular import, due to knot between anoncreds/routes and revocation_anoncreds/routes Signed-off-by: ff137 * :art: Fix new lines in description summary Signed-off-by: ff137 * :memo: Update openapi spec Signed-off-by: ff137 * :art: Reword update description Signed-off-by: ff137 --------- Signed-off-by: ff137 Co-authored-by: Stephen Curran --- acapy_agent/anoncreds/routes.py | 37 ++++++++++++------ acapy_agent/revocation_anoncreds/routes.py | 2 +- acapy_agent/wallet/keys/routes.py | 7 ++-- acapy_agent/wallet/routes.py | 45 +++++++++++++--------- open-api/openapi.json | 24 ++++++------ open-api/swagger.json | 24 ++++++------ 6 files changed, 81 insertions(+), 58 deletions(-) diff --git a/acapy_agent/anoncreds/routes.py b/acapy_agent/anoncreds/routes.py index 28ee72041b..5739679d50 100644 --- a/acapy_agent/anoncreds/routes.py +++ b/acapy_agent/anoncreds/routes.py @@ -48,6 +48,10 @@ LOGGER = logging.getLogger(__name__) +CRED_DEF_TAG_TITLE = "AnonCreds - Credential Definitions" +SCHEMAS_TAG_TITLE = "AnonCreds - Schemas" +REVOCATION_TAG_TITLE = "AnonCreds - Revocation" + SPEC_URI = "https://hyperledger.github.io/anoncreds-spec" endorser_connection_id_description = ( @@ -153,7 +157,10 @@ class SchemaPostRequestSchema(OpenAPISchema): options = fields.Nested(SchemaPostOptionSchema()) -@docs(tags=["anoncreds - schemas"], summary="Create a schema on the connected datastore") +@docs( + tags=[SCHEMAS_TAG_TITLE], + summary="Create a schema 
on the connected datastore", +) @request_schema(SchemaPostRequestSchema()) @response_schema(SchemaResultSchema(), 200, description="") @tenant_authentication @@ -225,7 +232,10 @@ async def schemas_post(request: web.BaseRequest): raise web.HTTPBadRequest(reason=e.roll_up) from e -@docs(tags=["anoncreds - schemas"], summary="Retrieve an individual schemas details") +@docs( + tags=[SCHEMAS_TAG_TITLE], + summary="Retrieve an individual schemas details", +) @match_info_schema(SchemaIdMatchInfo()) @response_schema(GetSchemaResultSchema(), 200, description="") @tenant_authentication @@ -255,7 +265,10 @@ async def schema_get(request: web.BaseRequest): raise web.HTTPBadRequest(reason=e.roll_up) from e -@docs(tags=["anoncreds - schemas"], summary="Retrieve all schema ids") +@docs( + tags=[SCHEMAS_TAG_TITLE], + summary="Retrieve all schema ids", +) @querystring_schema(SchemasQueryStringSchema()) @response_schema(GetSchemasResponseSchema(), 200, description="") @tenant_authentication @@ -397,7 +410,7 @@ class CredDefsQueryStringSchema(OpenAPISchema): @docs( - tags=["anoncreds - credential definitions"], + tags=[CRED_DEF_TAG_TITLE], summary="Create a credential definition on the connected datastore", ) @request_schema(CredDefPostRequestSchema()) @@ -449,7 +462,7 @@ async def cred_def_post(request: web.BaseRequest): @docs( - tags=["anoncreds - credential definitions"], + tags=[CRED_DEF_TAG_TITLE], summary="Retrieve an individual credential definition details", ) @match_info_schema(CredIdMatchInfo()) @@ -497,7 +510,7 @@ class GetCredDefsResponseSchema(OpenAPISchema): @docs( - tags=["anoncreds - credential definitions"], + tags=[CRED_DEF_TAG_TITLE], summary="Retrieve all credential definition ids", ) @querystring_schema(CredDefsQueryStringSchema()) @@ -592,7 +605,7 @@ class RevRegCreateRequestSchemaAnonCreds(OpenAPISchema): @docs( - tags=["anoncreds - revocation"], + tags=[REVOCATION_TAG_TITLE], summary="Create and publish a registration revocation on the connected datastore", ) 
@request_schema(RevRegCreateRequestSchemaAnonCreds()) @@ -677,7 +690,7 @@ class RevListCreateRequestSchema(OpenAPISchema): @docs( - tags=["anoncreds - revocation"], + tags=[REVOCATION_TAG_TITLE], summary="Create and publish a revocation status list on the connected datastore", ) @request_schema(RevListCreateRequestSchema()) @@ -713,7 +726,7 @@ async def rev_list_post(request: web.BaseRequest): @docs( - tags=["anoncreds - revocation"], + tags=[REVOCATION_TAG_TITLE], summary="Upload local tails file to server", ) @match_info_schema(AnonCredsRevRegIdMatchInfoSchema()) @@ -749,7 +762,7 @@ async def upload_tails_file(request: web.BaseRequest): @docs( - tags=["anoncreds - revocation"], + tags=[REVOCATION_TAG_TITLE], summary="Update the active registry", ) @match_info_schema(AnonCredsRevRegIdMatchInfoSchema()) @@ -820,14 +833,14 @@ def post_process_routes(app: web.Application): app._state["swagger_dict"]["tags"] = [] app._state["swagger_dict"]["tags"].append( { - "name": "AnonCreds - Schemas", + "name": SCHEMAS_TAG_TITLE, "description": "AnonCreds schema management", "externalDocs": {"description": "Specification", "url": SPEC_URI}, } ) app._state["swagger_dict"]["tags"].append( { - "name": "AnonCreds - Credential Definitions", + "name": CRED_DEF_TAG_TITLE, "description": "AnonCreds credential definition management", "externalDocs": {"description": "Specification", "url": SPEC_URI}, } diff --git a/acapy_agent/revocation_anoncreds/routes.py b/acapy_agent/revocation_anoncreds/routes.py index c89c929818..876106b6a0 100644 --- a/acapy_agent/revocation_anoncreds/routes.py +++ b/acapy_agent/revocation_anoncreds/routes.py @@ -313,7 +313,7 @@ class PublishRevocationsOptions(OpenAPISchema): endorser_connection_id = fields.Str( metadata={ - "description": endorser_connection_id_description, # noqa: F821 + "description": endorser_connection_id_description, "required": False, "example": UUIDFour.EXAMPLE, } diff --git a/acapy_agent/wallet/keys/routes.py 
b/acapy_agent/wallet/keys/routes.py index b2e4da7dbf..c35b02288c 100644 --- a/acapy_agent/wallet/keys/routes.py +++ b/acapy_agent/wallet/keys/routes.py @@ -10,6 +10,7 @@ from ...admin.request_context import AdminRequestContext from ...messaging.models.openapi import OpenAPISchema from ...wallet.error import WalletDuplicateError, WalletNotFoundError +from ..routes import WALLET_TAG_TITLE from .manager import DEFAULT_ALG, MultikeyManager, MultikeyManagerError LOGGER = logging.getLogger(__name__) @@ -124,7 +125,7 @@ class FetchKeyResponseSchema(OpenAPISchema): ) -@docs(tags=["wallet"], summary="Fetch key info.") +@docs(tags=[WALLET_TAG_TITLE], summary="Fetch key info.") @response_schema(FetchKeyResponseSchema, 200, description="") @tenant_authentication async def fetch_key(request: web.BaseRequest): @@ -149,7 +150,7 @@ async def fetch_key(request: web.BaseRequest): return web.json_response({"message": str(err)}, status=400) -@docs(tags=["wallet"], summary="Create a key pair") +@docs(tags=[WALLET_TAG_TITLE], summary="Create a key pair") @request_schema(CreateKeyRequestSchema()) @response_schema(CreateKeyResponseSchema, 200, description="") @tenant_authentication @@ -184,7 +185,7 @@ async def create_key(request: web.BaseRequest): return web.json_response({"message": str(err)}, status=400) -@docs(tags=["wallet"], summary="Update a key pair's kid") +@docs(tags=[WALLET_TAG_TITLE], summary="Update a key pair's kid") @request_schema(UpdateKeyRequestSchema()) @response_schema(UpdateKeyResponseSchema, 200, description="") @tenant_authentication diff --git a/acapy_agent/wallet/routes.py b/acapy_agent/wallet/routes.py index cd68be1d14..ef1add0667 100644 --- a/acapy_agent/wallet/routes.py +++ b/acapy_agent/wallet/routes.py @@ -88,6 +88,9 @@ LOGGER = logging.getLogger(__name__) +WALLET_TAG_TITLE = "wallet" +UPGRADE_TAG_TITLE = "AnonCreds - Wallet Upgrade" + class WalletModuleResponseSchema(OpenAPISchema): """Response schema for Wallet Module.""" @@ -447,7 +450,7 @@ def 
format_did_info(info: DIDInfo): } -@docs(tags=["wallet"], summary="List wallet DIDs") +@docs(tags=[WALLET_TAG_TITLE], summary="List wallet DIDs") @querystring_schema(DIDListQueryStringSchema()) @response_schema(DIDListSchema, 200, description="") @tenant_authentication @@ -553,7 +556,7 @@ async def wallet_did_list(request: web.BaseRequest): return web.json_response({"results": results}) -@docs(tags=["wallet"], summary="Create a local DID") +@docs(tags=[WALLET_TAG_TITLE], summary="Create a local DID") @request_schema(DIDCreateSchema()) @response_schema(DIDResultSchema, 200, description="") @tenant_authentication @@ -680,7 +683,7 @@ async def wallet_create_did(request: web.BaseRequest): return web.json_response({"result": format_did_info(info)}) -@docs(tags=["wallet"], summary="Fetch the current public DID") +@docs(tags=[WALLET_TAG_TITLE], summary="Fetch the current public DID") @response_schema(DIDResultSchema, 200, description="") @tenant_authentication async def wallet_get_public_did(request: web.BaseRequest): @@ -707,7 +710,7 @@ async def wallet_get_public_did(request: web.BaseRequest): return web.json_response({"result": format_did_info(info)}) -@docs(tags=["wallet"], summary="Assign the current public DID") +@docs(tags=[WALLET_TAG_TITLE], summary="Assign the current public DID") @querystring_schema(DIDQueryStringSchema()) @querystring_schema(CreateAttribTxnForEndorserOptionSchema()) @querystring_schema(AttribConnIdMatchInfoSchema()) @@ -952,7 +955,10 @@ async def promote_wallet_public_did( return info, attrib_def -@docs(tags=["wallet"], summary="Update endpoint in wallet and on ledger if posted to it") +@docs( + tags=[WALLET_TAG_TITLE], + summary="Update endpoint in wallet and on ledger if posted to it", +) @request_schema(DIDEndpointWithTypeSchema) @querystring_schema(CreateAttribTxnForEndorserOptionSchema()) @querystring_schema(AttribConnIdMatchInfoSchema()) @@ -1083,7 +1089,7 @@ async def wallet_set_did_endpoint(request: web.BaseRequest): return 
web.json_response({"txn": transaction.serialize()}) -@docs(tags=["wallet"], summary="Create a jws using did keys with a given payload") +@docs(tags=[WALLET_TAG_TITLE], summary="Create a jws using did keys with a given payload") @request_schema(JWSCreateSchema) @response_schema(WalletModuleResponseSchema(), description="") @tenant_authentication @@ -1121,7 +1127,10 @@ async def wallet_jwt_sign(request: web.BaseRequest): return web.json_response(jws) -@docs(tags=["wallet"], summary="Create an sd-jws using did keys with a given payload") +@docs( + tags=[WALLET_TAG_TITLE], + summary="Create an sd-jws using did keys with a given payload", +) @request_schema(SDJWSCreateSchema) @response_schema(WalletModuleResponseSchema(), description="") @tenant_authentication @@ -1163,7 +1172,7 @@ async def wallet_sd_jwt_sign(request: web.BaseRequest): return web.json_response(sd_jws) -@docs(tags=["wallet"], summary="Verify a jws using did keys with a given JWS") +@docs(tags=[WALLET_TAG_TITLE], summary="Verify a jws using did keys with a given JWS") @request_schema(JWSVerifySchema()) @response_schema(JWSVerifyResponseSchema(), 200, description="") @tenant_authentication @@ -1204,7 +1213,7 @@ async def wallet_jwt_verify(request: web.BaseRequest): @docs( - tags=["wallet"], + tags=[WALLET_TAG_TITLE], summary="Verify an sd-jws using did keys with a given SD-JWS with " "optional key binding", ) @@ -1239,7 +1248,7 @@ async def wallet_sd_jwt_verify(request: web.BaseRequest): return web.json_response(result.serialize()) -@docs(tags=["wallet"], summary="Query DID endpoint in wallet") +@docs(tags=[WALLET_TAG_TITLE], summary="Query DID endpoint in wallet") @querystring_schema(DIDQueryStringSchema()) @response_schema(DIDEndpointSchema, 200, description="") @tenant_authentication @@ -1273,7 +1282,9 @@ async def wallet_get_did_endpoint(request: web.BaseRequest): return web.json_response({"did": did, "endpoint": endpoint}) -@docs(tags=["wallet"], summary="Rotate keypair for a DID not posted to the 
ledger") +@docs( + tags=[WALLET_TAG_TITLE], summary="Rotate keypair for a DID not posted to the ledger" +) @querystring_schema(DIDQueryStringSchema()) @response_schema(WalletModuleResponseSchema(), description="") @tenant_authentication @@ -1329,11 +1340,9 @@ class UpgradeResultSchema(OpenAPISchema): @docs( - tags=["anoncreds - wallet upgrade"], - summary=""" - Upgrade the wallet from askar to anoncreds - Be very careful with this! You - cannot go back! See migration guide for more information. - """, + tags=[UPGRADE_TAG_TITLE], + summary="Upgrade the wallet from askar to askar-anoncreds. Be very careful with this!" + " You cannot go back! See migration guide for more information.", ) @querystring_schema(UpgradeVerificationSchema()) @response_schema(UpgradeResultSchema(), description="") @@ -1484,7 +1493,7 @@ def post_process_routes(app: web.Application): app._state["swagger_dict"]["tags"] = [] app._state["swagger_dict"]["tags"].append( { - "name": "wallet", + "name": WALLET_TAG_TITLE, "description": "DID and tag policy management", "externalDocs": { "description": "Design", @@ -1497,7 +1506,7 @@ def post_process_routes(app: web.Application): ) app._state["swagger_dict"]["tags"].append( { - "name": "AnonCreds - Wallet Upgrade", + "name": UPGRADE_TAG_TITLE, "description": "AnonCreds wallet upgrade", "externalDocs": { "description": "Specification", diff --git a/open-api/openapi.json b/open-api/openapi.json index c33c50f766..f14a618e8e 100644 --- a/open-api/openapi.json +++ b/open-api/openapi.json @@ -394,7 +394,7 @@ } }, "summary" : "Create a credential definition on the connected datastore", - "tags" : [ "anoncreds - credential definitions" ], + "tags" : [ "AnonCreds - Credential Definitions" ], "x-codegen-request-body-name" : "body" } }, @@ -422,7 +422,7 @@ } }, "summary" : "Retrieve an individual credential definition details", - "tags" : [ "anoncreds - credential definitions" ] + "tags" : [ "AnonCreds - Credential Definitions" ] } }, 
"/anoncreds/credential-definitions" : { @@ -469,7 +469,7 @@ } }, "summary" : "Retrieve all credential definition ids", - "tags" : [ "anoncreds - credential definitions" ] + "tags" : [ "AnonCreds - Credential Definitions" ] } }, "/anoncreds/registry/{rev_reg_id}/active" : { @@ -497,7 +497,7 @@ } }, "summary" : "Update the active registry", - "tags" : [ "anoncreds - revocation" ] + "tags" : [ "AnonCreds - Revocation" ] } }, "/anoncreds/registry/{rev_reg_id}/tails-file" : { @@ -525,7 +525,7 @@ } }, "summary" : "Upload local tails file to server", - "tags" : [ "anoncreds - revocation" ] + "tags" : [ "AnonCreds - Revocation" ] } }, "/anoncreds/revocation-list" : { @@ -553,7 +553,7 @@ } }, "summary" : "Create and publish a revocation status list on the connected datastore", - "tags" : [ "anoncreds - revocation" ], + "tags" : [ "AnonCreds - Revocation" ], "x-codegen-request-body-name" : "body" } }, @@ -582,7 +582,7 @@ } }, "summary" : "Create and publish a registration revocation on the connected datastore", - "tags" : [ "anoncreds - revocation" ], + "tags" : [ "AnonCreds - Revocation" ], "x-codegen-request-body-name" : "body" } }, @@ -1016,7 +1016,7 @@ } }, "summary" : "Create a schema on the connected datastore", - "tags" : [ "anoncreds - schemas" ], + "tags" : [ "AnonCreds - Schemas" ], "x-codegen-request-body-name" : "body" } }, @@ -1044,7 +1044,7 @@ } }, "summary" : "Retrieve an individual schemas details", - "tags" : [ "anoncreds - schemas" ] + "tags" : [ "AnonCreds - Schemas" ] } }, "/anoncreds/schemas" : { @@ -1084,7 +1084,7 @@ } }, "summary" : "Retrieve all schema ids", - "tags" : [ "anoncreds - schemas" ] + "tags" : [ "AnonCreds - Schemas" ] } }, "/anoncreds/wallet/upgrade" : { @@ -1110,8 +1110,8 @@ "description" : "" } }, - "summary" : "\n Upgrade the wallet from askar to anoncreds - Be very careful with this! You \n cannot go back! 
See migration guide for more information.\n ", - "tags" : [ "anoncreds - wallet upgrade" ] + "summary" : "Upgrade the wallet from askar to askar-anoncreds. Be very careful with this! You cannot go back! See migration guide for more information.", + "tags" : [ "AnonCreds - Wallet Upgrade" ] } }, "/connections" : { diff --git a/open-api/swagger.json b/open-api/swagger.json index f6eabbc786..5c7422c890 100644 --- a/open-api/swagger.json +++ b/open-api/swagger.json @@ -335,7 +335,7 @@ }, "/anoncreds/credential-definition" : { "post" : { - "tags" : [ "anoncreds - credential definitions" ], + "tags" : [ "AnonCreds - Credential Definitions" ], "summary" : "Create a credential definition on the connected datastore", "produces" : [ "application/json" ], "parameters" : [ { @@ -358,7 +358,7 @@ }, "/anoncreds/credential-definition/{cred_def_id}" : { "get" : { - "tags" : [ "anoncreds - credential definitions" ], + "tags" : [ "AnonCreds - Credential Definitions" ], "summary" : "Retrieve an individual credential definition details", "produces" : [ "application/json" ], "parameters" : [ { @@ -380,7 +380,7 @@ }, "/anoncreds/credential-definitions" : { "get" : { - "tags" : [ "anoncreds - credential definitions" ], + "tags" : [ "AnonCreds - Credential Definitions" ], "summary" : "Retrieve all credential definition ids", "produces" : [ "application/json" ], "parameters" : [ { @@ -420,7 +420,7 @@ }, "/anoncreds/registry/{rev_reg_id}/active" : { "put" : { - "tags" : [ "anoncreds - revocation" ], + "tags" : [ "AnonCreds - Revocation" ], "summary" : "Update the active registry", "produces" : [ "application/json" ], "parameters" : [ { @@ -443,7 +443,7 @@ }, "/anoncreds/registry/{rev_reg_id}/tails-file" : { "put" : { - "tags" : [ "anoncreds - revocation" ], + "tags" : [ "AnonCreds - Revocation" ], "summary" : "Upload local tails file to server", "produces" : [ "application/json" ], "parameters" : [ { @@ -466,7 +466,7 @@ }, "/anoncreds/revocation-list" : { "post" : { - "tags" : [ "anoncreds 
- revocation" ], + "tags" : [ "AnonCreds - Revocation" ], "summary" : "Create and publish a revocation status list on the connected datastore", "produces" : [ "application/json" ], "parameters" : [ { @@ -489,7 +489,7 @@ }, "/anoncreds/revocation-registry-definition" : { "post" : { - "tags" : [ "anoncreds - revocation" ], + "tags" : [ "AnonCreds - Revocation" ], "summary" : "Create and publish a registration revocation on the connected datastore", "produces" : [ "application/json" ], "parameters" : [ { @@ -845,7 +845,7 @@ }, "/anoncreds/schema" : { "post" : { - "tags" : [ "anoncreds - schemas" ], + "tags" : [ "AnonCreds - Schemas" ], "summary" : "Create a schema on the connected datastore", "produces" : [ "application/json" ], "parameters" : [ { @@ -868,7 +868,7 @@ }, "/anoncreds/schema/{schema_id}" : { "get" : { - "tags" : [ "anoncreds - schemas" ], + "tags" : [ "AnonCreds - Schemas" ], "summary" : "Retrieve an individual schemas details", "produces" : [ "application/json" ], "parameters" : [ { @@ -890,7 +890,7 @@ }, "/anoncreds/schemas" : { "get" : { - "tags" : [ "anoncreds - schemas" ], + "tags" : [ "AnonCreds - Schemas" ], "summary" : "Retrieve all schema ids", "produces" : [ "application/json" ], "parameters" : [ { @@ -924,8 +924,8 @@ }, "/anoncreds/wallet/upgrade" : { "post" : { - "tags" : [ "anoncreds - wallet upgrade" ], - "summary" : "\n Upgrade the wallet from askar to anoncreds - Be very careful with this! You \n cannot go back! See migration guide for more information.\n ", + "tags" : [ "AnonCreds - Wallet Upgrade" ], + "summary" : "Upgrade the wallet from askar to askar-anoncreds. Be very careful with this! You cannot go back! 
See migration guide for more information.", "produces" : [ "application/json" ], "parameters" : [ { "name" : "wallet_name", From 785ec4baf36ec91148ab92f7fee3c736e64a4761 Mon Sep 17 00:00:00 2001 From: Mourits de Beer <31511766+ff137@users.noreply.github.com> Date: Wed, 16 Apr 2025 19:08:16 +0200 Subject: [PATCH 09/32] :test_tube: Fix test warnings (#3656) * :white_check_mark: Correctly tear down ClientSession in TestAdminServer Signed-off-by: ff137 * :art: Ignore create_did_document deprecation warning in test run Signed-off-by: ff137 * :test_tube: async test fixtures should use pytest_asyncio.fixture Signed-off-by: ff137 * :art: Add missing pytest.mark.asyncio Signed-off-by: ff137 * :art: Ignore create_did_document deprecation warning in test run Signed-off-by: ff137 --------- Signed-off-by: ff137 Co-authored-by: Stephen Curran --- acapy_agent/admin/tests/test_admin_server.py | 9 ++++++++- acapy_agent/askar/didcomm/tests/test_v2.py | 3 ++- acapy_agent/cache/tests/test_in_memory_cache.py | 3 ++- acapy_agent/connections/tests/test_base_manager.py | 4 ++++ acapy_agent/indy/models/tests/test_pres_preview.py | 1 + acapy_agent/ledger/tests/test_indy_vdr.py | 3 ++- .../messaging/decorators/tests/test_attach_decorator.py | 3 ++- acapy_agent/messaging/jsonld/tests/test_routes.py | 5 +++-- acapy_agent/multitenant/tests/test_route_manager.py | 5 +++-- .../v1_0/handlers/tests/test_basicmessage_handler.py | 3 ++- .../v1_0/handlers/tests/test_keylist_handler.py | 5 +++-- .../v1_0/handlers/tests/test_mediation_grant_handler.py | 5 +++-- .../v1_0/handlers/tests/test_problem_report_handler.py | 7 ++++--- .../v1_0/models/tests/test_mediation_record.py | 3 ++- .../v1_0/tests/test_mediation_manager.py | 5 +++-- .../v1_0/tests/test_route_manager.py | 3 ++- .../did_rotate/v1_0/handlers/tests/test_ack_handler.py | 3 ++- .../v1_0/handlers/tests/test_hangup_handler.py | 3 ++- .../v1_0/handlers/tests/test_problem_report_handler.py | 3 ++- .../v1_0/handlers/tests/test_rotate_handler.py | 3 
++- .../v1_0/handlers/tests/test_complete_handler.py | 3 ++- .../v1_0/handlers/tests/test_invitation_handler.py | 3 ++- .../v1_0/handlers/tests/test_problem_report_handler.py | 3 ++- .../protocols/didexchange/v1_0/tests/test_manager.py | 4 ++++ .../v1_0/handlers/tests/test_disclose_handler.py | 3 ++- .../discovery/v1_0/handlers/tests/test_query_handler.py | 3 ++- .../v2_0/handlers/tests/test_disclosures_handler.py | 3 ++- .../v2_0/handlers/tests/test_queries_handler.py | 3 ++- .../v1_0/handlers/tests/test_problem_report_handler.py | 7 ++++--- .../v1_0/handlers/tests/test_reuse_accept_handler.py | 7 ++++--- .../v1_0/handlers/tests/test_reuse_handler.py | 5 +++-- .../out_of_band/v1_0/models/tests/test_out_of_band.py | 3 ++- .../protocols/problem_report/v1_0/tests/test_handler.py | 3 ++- .../v1_0/handlers/tests/test_revoke_handler.py | 3 ++- .../v1_0/models/tests/test_rev_notification_record.py | 3 ++- .../revocation_notification/v1_0/tests/test_routes.py | 3 ++- .../v2_0/handlers/tests/test_revoke_handler.py | 3 ++- .../v2_0/models/tests/test_rev_notification_record.py | 3 ++- .../revocation_notification/v2_0/tests/test_routes.py | 3 ++- .../trustping/v1_0/handlers/tests/test_ping_handler.py | 3 ++- .../v1_0/handlers/tests/test_ping_response_handler.py | 3 ++- acapy_agent/resolver/default/tests/test_indy.py | 3 ++- acapy_agent/resolver/default/tests/test_jwk.py | 3 ++- acapy_agent/resolver/default/tests/test_key.py | 3 ++- acapy_agent/resolver/default/tests/test_legacy_peer.py | 3 ++- acapy_agent/resolver/default/tests/test_peer2.py | 3 ++- acapy_agent/resolver/default/tests/test_peer3.py | 5 +++-- acapy_agent/resolver/default/tests/test_peer4.py | 5 +++-- acapy_agent/resolver/default/tests/test_universal.py | 5 +++-- acapy_agent/resolver/default/tests/test_webvh.py | 3 ++- acapy_agent/resolver/tests/test_did_resolver.py | 3 ++- acapy_agent/resolver/tests/test_routes.py | 3 ++- acapy_agent/settings/tests/test_routes.py | 5 +++-- 
acapy_agent/storage/tests/test_askar_storage.py | 3 ++- .../storage/vc_holder/tests/test_askar_vc_holder.py | 3 ++- acapy_agent/vc/vc_di/tests/test_manager.py | 3 ++- acapy_agent/vc/vc_di/tests/test_prove.py | 3 ++- 57 files changed, 139 insertions(+), 70 deletions(-) diff --git a/acapy_agent/admin/tests/test_admin_server.py b/acapy_agent/admin/tests/test_admin_server.py index 52aad61761..d7af57c66f 100644 --- a/acapy_agent/admin/tests/test_admin_server.py +++ b/acapy_agent/admin/tests/test_admin_server.py @@ -5,6 +5,7 @@ import jwt import pytest +import pytest_asyncio from aiohttp import ClientSession, DummyCookieJar, TCPConnector, web from aiohttp.test_utils import unused_port from marshmallow import ValidationError @@ -47,6 +48,12 @@ async def asyncSetUp(self): cookie_jar=DummyCookieJar(), connector=self.connector ) + async def asyncTearDown(self): + if self.client_session: + await self.client_session.close() + if self.connector: + await self.connector.close() + async def test_debug_middleware(self): with mock.patch.object(test_module, "LOGGER", mock.MagicMock()) as mock_logger: mock_logger.isEnabledFor = mock.MagicMock(return_value=True) @@ -571,7 +578,7 @@ async def test_upgrade_middleware(self): await test_module.upgrade_middleware(request, handler) -@pytest.fixture +@pytest_asyncio.fixture async def server(): test_class = TestAdminServer() await test_class.asyncSetUp() diff --git a/acapy_agent/askar/didcomm/tests/test_v2.py b/acapy_agent/askar/didcomm/tests/test_v2.py index fc01f78612..f80e2581d8 100644 --- a/acapy_agent/askar/didcomm/tests/test_v2.py +++ b/acapy_agent/askar/didcomm/tests/test_v2.py @@ -2,6 +2,7 @@ from unittest import mock import pytest +import pytest_asyncio from aries_askar import AskarError, Key, KeyAlg, Session from ....utils.jwe import JweEnvelope, JweRecipient, b64url @@ -14,7 +15,7 @@ MESSAGE = b"Expecto patronum" -@pytest.fixture() +@pytest_asyncio.fixture async def session(): profile = await create_test_profile() async with 
profile.session() as session: diff --git a/acapy_agent/cache/tests/test_in_memory_cache.py b/acapy_agent/cache/tests/test_in_memory_cache.py index 381b60ab53..342dcad523 100644 --- a/acapy_agent/cache/tests/test_in_memory_cache.py +++ b/acapy_agent/cache/tests/test_in_memory_cache.py @@ -1,12 +1,13 @@ from asyncio import sleep, wait_for import pytest +import pytest_asyncio from ..base import CacheError from ..in_memory import InMemoryCache -@pytest.fixture() +@pytest_asyncio.fixture async def cache(): cache = InMemoryCache() await cache.set("valid key", "value") diff --git a/acapy_agent/connections/tests/test_base_manager.py b/acapy_agent/connections/tests/test_base_manager.py index 958f567bb3..605d9e459b 100644 --- a/acapy_agent/connections/tests/test_base_manager.py +++ b/acapy_agent/connections/tests/test_base_manager.py @@ -1039,6 +1039,7 @@ async def test_get_conn_targets_invitation_no_cache(self): assert target.routing_keys == [self.test_verkey] assert target.sender_key == local_did.verkey + @pytest.mark.filterwarnings("ignore::UserWarning") # create_did_document deprecation async def test_create_static_connection(self): self.multitenant_mgr = mock.MagicMock(MultitenantManager, autospec=True) self.multitenant_mgr.get_default_mediator = mock.CoroutineMock(return_value=None) @@ -1053,6 +1054,7 @@ async def test_create_static_connection(self): assert ConnRecord.State.get(conn_rec.state) is ConnRecord.State.COMPLETED + @pytest.mark.filterwarnings("ignore::UserWarning") # create_did_document deprecation async def test_create_static_connection_multitenant(self): self.context.update_settings( {"wallet.id": "test_wallet", "multitenant.enabled": True} @@ -1084,6 +1086,7 @@ async def test_create_static_connection_multitenant(self): self.route_manager.route_static.assert_called_once() + @pytest.mark.filterwarnings("ignore::UserWarning") # create_did_document deprecation async def test_create_static_connection_multitenant_auto_disclose_features(self): 
self.context.update_settings( { @@ -1193,6 +1196,7 @@ async def test_create_static_connection_no_their(self): their_endpoint=self.test_endpoint, ) + @pytest.mark.filterwarnings("ignore::UserWarning") # create_did_document deprecation async def test_create_static_connection_their_seed_only(self): self.multitenant_mgr = mock.MagicMock(MultitenantManager, autospec=True) self.multitenant_mgr.get_default_mediator = mock.CoroutineMock(return_value=None) diff --git a/acapy_agent/indy/models/tests/test_pres_preview.py b/acapy_agent/indy/models/tests/test_pres_preview.py index 7e89720bb7..4d3cf396fb 100644 --- a/acapy_agent/indy/models/tests/test_pres_preview.py +++ b/acapy_agent/indy/models/tests/test_pres_preview.py @@ -367,6 +367,7 @@ async def test_to_indy_proof_request_attr_names(self): assert indy_proof_req == INDY_PROOF_REQ_ATTR_NAMES + @pytest.mark.asyncio async def test_to_indy_proof_request_self_attested(self): """Test presentation preview to indy proof request with self-attested values.""" diff --git a/acapy_agent/ledger/tests/test_indy_vdr.py b/acapy_agent/ledger/tests/test_indy_vdr.py index 9b0ee8ce3c..0a6a885874 100644 --- a/acapy_agent/ledger/tests/test_indy_vdr.py +++ b/acapy_agent/ledger/tests/test_indy_vdr.py @@ -2,6 +2,7 @@ import indy_vdr import pytest +import pytest_asyncio from ...anoncreds.default.legacy_indy.registry import LegacyIndyRegistry from ...cache.base import BaseCache @@ -34,7 +35,7 @@ ) -@pytest.fixture() +@pytest_asyncio.fixture async def ledger(): did_methods = DIDMethods() did_methods.register(WEB) diff --git a/acapy_agent/messaging/decorators/tests/test_attach_decorator.py b/acapy_agent/messaging/decorators/tests/test_attach_decorator.py index 5466c802d7..8ee3f9f096 100644 --- a/acapy_agent/messaging/decorators/tests/test_attach_decorator.py +++ b/acapy_agent/messaging/decorators/tests/test_attach_decorator.py @@ -4,6 +4,7 @@ from unittest import TestCase import pytest +import pytest_asyncio from uuid_utils import uuid4 from 
....messaging.models.base import BaseModelError @@ -76,7 +77,7 @@ def seed(): return [f"TestWalletSignVerifyAttachDeco0{i}" for i in [0, 1]] -@pytest.fixture() +@pytest_asyncio.fixture async def wallet(): profile = await create_test_profile() profile.context.injector.bind_instance(DIDMethods, DIDMethods()) diff --git a/acapy_agent/messaging/jsonld/tests/test_routes.py b/acapy_agent/messaging/jsonld/tests/test_routes.py index b7b1799d7f..df08f2d669 100644 --- a/acapy_agent/messaging/jsonld/tests/test_routes.py +++ b/acapy_agent/messaging/jsonld/tests/test_routes.py @@ -3,6 +3,7 @@ from unittest import IsolatedAsyncioTestCase import pytest +import pytest_asyncio from aiohttp import web from pyld import jsonld @@ -81,7 +82,7 @@ def mock_verify_credential(): test_module.verify_credential = temp -@pytest.fixture +@pytest_asyncio.fixture async def mock_sign_request(mock_sign_credential): profile = await create_test_profile( settings={ @@ -139,7 +140,7 @@ def request_body(): } -@pytest.fixture +@pytest_asyncio.fixture async def mock_verify_request(mock_verify_credential, mock_resolver, request_body): profile = await create_test_profile( settings={ diff --git a/acapy_agent/multitenant/tests/test_route_manager.py b/acapy_agent/multitenant/tests/test_route_manager.py index 245102e02d..61adab321b 100644 --- a/acapy_agent/multitenant/tests/test_route_manager.py +++ b/acapy_agent/multitenant/tests/test_route_manager.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ...core.profile import Profile from ...messaging.responder import BaseResponder, MockResponder @@ -25,14 +26,14 @@ def mock_responder(): yield MockResponder() -@pytest.fixture +@pytest_asyncio.fixture async def root_profile(mock_responder: MockResponder): profile = await create_test_profile() profile.context.injector.bind_instance(BaseResponder, mock_responder) yield profile -@pytest.fixture +@pytest_asyncio.fixture async def sub_profile(mock_responder: MockResponder): profile = await create_test_profile() 
profile.context.injector.bind_instance(BaseResponder, mock_responder) diff --git a/acapy_agent/protocols/basicmessage/v1_0/handlers/tests/test_basicmessage_handler.py b/acapy_agent/protocols/basicmessage/v1_0/handlers/tests/test_basicmessage_handler.py index 85cd07f2b4..ee723c5327 100644 --- a/acapy_agent/protocols/basicmessage/v1_0/handlers/tests/test_basicmessage_handler.py +++ b/acapy_agent/protocols/basicmessage/v1_0/handlers/tests/test_basicmessage_handler.py @@ -1,6 +1,7 @@ from unittest import mock import pytest +import pytest_asyncio from ......core.event_bus import Event, EventBus, MockEventBus from ......messaging.decorators.localization_decorator import LocalizationDecorator @@ -11,7 +12,7 @@ from ...messages.basicmessage import BasicMessage -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): yield RequestContext.test_context(await create_test_profile()) diff --git a/acapy_agent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_handler.py b/acapy_agent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_handler.py index 377f2a7743..689c3e1690 100644 --- a/acapy_agent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_handler.py +++ b/acapy_agent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_handler.py @@ -3,6 +3,7 @@ import logging import pytest +import pytest_asyncio from ......connections.models.conn_record import ConnRecord from ......messaging.base_handler import HandlerException @@ -17,7 +18,7 @@ pytestmark = pytest.mark.asyncio -@pytest.fixture +@pytest_asyncio.fixture async def context(): """Fixture for context used in tests.""" # pylint: disable=W0621 @@ -28,7 +29,7 @@ async def context(): yield context -@pytest.fixture +@pytest_asyncio.fixture async def session(context): # pylint: disable=W0621 """Fixture for session used in tests""" yield await context.session() diff --git a/acapy_agent/protocols/coordinate_mediation/v1_0/handlers/tests/test_mediation_grant_handler.py 
b/acapy_agent/protocols/coordinate_mediation/v1_0/handlers/tests/test_mediation_grant_handler.py index e494e4c83a..62fcb48248 100644 --- a/acapy_agent/protocols/coordinate_mediation/v1_0/handlers/tests/test_mediation_grant_handler.py +++ b/acapy_agent/protocols/coordinate_mediation/v1_0/handlers/tests/test_mediation_grant_handler.py @@ -1,6 +1,7 @@ """Test mediate grant message handler.""" import pytest +import pytest_asyncio from acapy_agent.core.profile import ProfileSession from acapy_agent.tests import mock @@ -23,7 +24,7 @@ TEST_ENDPOINT = "https://example.com" -@pytest.fixture() +@pytest_asyncio.fixture async def context(): context = RequestContext.test_context(await create_test_profile()) context.message = MediationGrant(endpoint=TEST_ENDPOINT, routing_keys=[TEST_VERKEY]) @@ -32,7 +33,7 @@ async def context(): yield context -@pytest.fixture() +@pytest_asyncio.fixture async def session(context: RequestContext): yield await context.session() diff --git a/acapy_agent/protocols/coordinate_mediation/v1_0/handlers/tests/test_problem_report_handler.py b/acapy_agent/protocols/coordinate_mediation/v1_0/handlers/tests/test_problem_report_handler.py index 5cbc7ed501..b448df8b22 100644 --- a/acapy_agent/protocols/coordinate_mediation/v1_0/handlers/tests/test_problem_report_handler.py +++ b/acapy_agent/protocols/coordinate_mediation/v1_0/handlers/tests/test_problem_report_handler.py @@ -1,6 +1,7 @@ """Test Problem Report Handler.""" import pytest +import pytest_asyncio from ......connections.models.conn_record import ConnRecord from ......messaging.request_context import RequestContext @@ -11,14 +12,14 @@ from ...messages.problem_report import CMProblemReport, ProblemReportReason -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): ctx = RequestContext.test_context(await create_test_profile()) ctx.message_receipt = MessageReceipt() yield ctx -@pytest.fixture() +@pytest_asyncio.fixture async def connection_record(request_context, session): record = 
ConnRecord() request_context.connection_record = record @@ -26,7 +27,7 @@ async def connection_record(request_context, session): yield record -@pytest.fixture() +@pytest_asyncio.fixture async def session(request_context): yield await request_context.session() diff --git a/acapy_agent/protocols/coordinate_mediation/v1_0/models/tests/test_mediation_record.py b/acapy_agent/protocols/coordinate_mediation/v1_0/models/tests/test_mediation_record.py index 7b5a2a6444..db4fbe5a68 100644 --- a/acapy_agent/protocols/coordinate_mediation/v1_0/models/tests/test_mediation_record.py +++ b/acapy_agent/protocols/coordinate_mediation/v1_0/models/tests/test_mediation_record.py @@ -3,6 +3,7 @@ import json import pytest +import pytest_asyncio from ......core.profile import ProfileSession from ......storage.base import BaseStorage @@ -11,7 +12,7 @@ from ..mediation_record import MediationRecord -@pytest.fixture() +@pytest_asyncio.fixture async def session(): profile = await create_test_profile() async with profile.session() as session: diff --git a/acapy_agent/protocols/coordinate_mediation/v1_0/tests/test_mediation_manager.py b/acapy_agent/protocols/coordinate_mediation/v1_0/tests/test_mediation_manager.py index c0ef5c6855..b52114ef97 100644 --- a/acapy_agent/protocols/coordinate_mediation/v1_0/tests/test_mediation_manager.py +++ b/acapy_agent/protocols/coordinate_mediation/v1_0/tests/test_mediation_manager.py @@ -3,6 +3,7 @@ from typing import AsyncIterable, Iterable import pytest +import pytest_asyncio from .....core.event_bus import EventBus from .....core.profile import Profile, ProfileSession @@ -38,7 +39,7 @@ pytestmark = pytest.mark.asyncio -@pytest.fixture +@pytest_asyncio.fixture async def profile(): """Fixture for profile used in tests.""" profile = await create_test_profile() @@ -52,7 +53,7 @@ def mock_event_bus(profile: Profile): yield profile.inject(EventBus) -@pytest.fixture +@pytest_asyncio.fixture async def session(profile) -> AsyncIterable[ProfileSession]: # pylint: 
disable=W0621 """Fixture for profile session used in tests.""" async with profile.session() as session: diff --git a/acapy_agent/protocols/coordinate_mediation/v1_0/tests/test_route_manager.py b/acapy_agent/protocols/coordinate_mediation/v1_0/tests/test_route_manager.py index 86d9cc6bff..a27487ae6c 100644 --- a/acapy_agent/protocols/coordinate_mediation/v1_0/tests/test_route_manager.py +++ b/acapy_agent/protocols/coordinate_mediation/v1_0/tests/test_route_manager.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from .....connections.models.conn_record import ConnRecord from .....core.profile import Profile @@ -42,7 +43,7 @@ def mock_responder(): yield MockResponder() -@pytest.fixture +@pytest_asyncio.fixture async def profile(mock_responder: MockResponder): profile = await create_test_profile() profile.context.injector.bind_instance(BaseResponder, mock_responder) diff --git a/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_ack_handler.py b/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_ack_handler.py index 594863ae55..98738930d0 100644 --- a/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_ack_handler.py +++ b/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_ack_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ......messaging.request_context import RequestContext from ......messaging.responder import MockResponder @@ -8,7 +9,7 @@ from .. 
import ack_handler as test_module -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): ctx = RequestContext.test_context(await create_test_profile()) yield ctx diff --git a/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_hangup_handler.py b/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_hangup_handler.py index 164cdd3729..8e8e8a76ad 100644 --- a/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_hangup_handler.py +++ b/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_hangup_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ......messaging.request_context import RequestContext from ......messaging.responder import MockResponder @@ -8,7 +9,7 @@ from .. import hangup_handler as test_module -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): ctx = RequestContext.test_context(await create_test_profile()) yield ctx diff --git a/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_problem_report_handler.py b/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_problem_report_handler.py index a39a2e7dfd..8f888093da 100644 --- a/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_problem_report_handler.py +++ b/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_problem_report_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ......messaging.request_context import RequestContext from ......messaging.responder import MockResponder @@ -12,7 +13,7 @@ } -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): yield RequestContext.test_context(await create_test_profile()) diff --git a/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_rotate_handler.py b/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_rotate_handler.py index 5d1c648018..5369fe52b0 100644 --- a/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_rotate_handler.py +++ 
b/acapy_agent/protocols/did_rotate/v1_0/handlers/tests/test_rotate_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ......messaging.request_context import RequestContext from ......messaging.responder import MockResponder @@ -12,7 +13,7 @@ } -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): yield RequestContext.test_context(await create_test_profile()) diff --git a/acapy_agent/protocols/didexchange/v1_0/handlers/tests/test_complete_handler.py b/acapy_agent/protocols/didexchange/v1_0/handlers/tests/test_complete_handler.py index 5684c16e58..0bd4cba1a0 100644 --- a/acapy_agent/protocols/didexchange/v1_0/handlers/tests/test_complete_handler.py +++ b/acapy_agent/protocols/didexchange/v1_0/handlers/tests/test_complete_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from acapy_agent.tests import mock @@ -13,7 +14,7 @@ from .. import complete_handler as test_module -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): ctx = RequestContext.test_context(await create_test_profile()) ctx.injector.bind_instance(DIDMethods, DIDMethods()) diff --git a/acapy_agent/protocols/didexchange/v1_0/handlers/tests/test_invitation_handler.py b/acapy_agent/protocols/didexchange/v1_0/handlers/tests/test_invitation_handler.py index 3cbbe637ec..4bce3b5ee1 100644 --- a/acapy_agent/protocols/didexchange/v1_0/handlers/tests/test_invitation_handler.py +++ b/acapy_agent/protocols/didexchange/v1_0/handlers/tests/test_invitation_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ......messaging.request_context import RequestContext from ......messaging.responder import MockResponder @@ -10,7 +11,7 @@ from ...messages.problem_report import DIDXProblemReport, ProblemReportReason -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): ctx = RequestContext.test_context(await create_test_profile()) ctx.injector.bind_instance(DIDMethods, DIDMethods()) diff --git 
a/acapy_agent/protocols/didexchange/v1_0/handlers/tests/test_problem_report_handler.py b/acapy_agent/protocols/didexchange/v1_0/handlers/tests/test_problem_report_handler.py index 5a1c49e8bd..55d615052f 100644 --- a/acapy_agent/protocols/didexchange/v1_0/handlers/tests/test_problem_report_handler.py +++ b/acapy_agent/protocols/didexchange/v1_0/handlers/tests/test_problem_report_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from acapy_agent.tests import mock @@ -11,7 +12,7 @@ from .. import problem_report_handler as test_module -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): yield RequestContext.test_context(await create_test_profile()) diff --git a/acapy_agent/protocols/didexchange/v1_0/tests/test_manager.py b/acapy_agent/protocols/didexchange/v1_0/tests/test_manager.py index 5dae863727..5683f82433 100644 --- a/acapy_agent/protocols/didexchange/v1_0/tests/test_manager.py +++ b/acapy_agent/protocols/didexchange/v1_0/tests/test_manager.py @@ -1,6 +1,7 @@ import json from unittest import IsolatedAsyncioTestCase +import pytest from pydid import DIDDocument from .....admin.server import AdminResponder @@ -162,6 +163,7 @@ async def test_verify_diddoc(self): with self.assertRaises(DIDXManagerError): await self.manager.verify_diddoc(wallet, did_doc_attach) + @pytest.mark.filterwarnings("ignore::UserWarning") # create_did_document deprecation async def test_receive_invitation(self): async with self.profile.session() as session: self.profile.context.update_settings({"public_invites": True}) @@ -198,6 +200,7 @@ async def test_receive_invitation(self): assert invitee_record.state == ConnRecord.State.REQUEST.rfc23 assert mock_send_reply.called + @pytest.mark.filterwarnings("ignore::UserWarning") # create_did_document deprecation async def test_receive_invitation_oob_public_did(self): async with self.profile.session() as session: wallet = session.inject(BaseWallet) @@ -1547,6 +1550,7 @@ async def test_create_response(self): await 
self.manager.create_response(conn_rec, "http://10.20.30.40:5060/") + @pytest.mark.filterwarnings("ignore::UserWarning") # create_did_document deprecation async def test_create_response_mediation_id(self): async with self.profile.session() as session: mediation_record = MediationRecord( diff --git a/acapy_agent/protocols/discovery/v1_0/handlers/tests/test_disclose_handler.py b/acapy_agent/protocols/discovery/v1_0/handlers/tests/test_disclose_handler.py index e25bfef037..77bca80c8b 100644 --- a/acapy_agent/protocols/discovery/v1_0/handlers/tests/test_disclose_handler.py +++ b/acapy_agent/protocols/discovery/v1_0/handlers/tests/test_disclose_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ......core.protocol_registry import ProtocolRegistry from ......messaging.base_handler import HandlerException @@ -16,7 +17,7 @@ TEST_MESSAGE_TYPE = TEST_MESSAGE_FAMILY + "/message" -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): ctx = RequestContext.test_context(await create_test_profile()) ctx.connection_ready = True diff --git a/acapy_agent/protocols/discovery/v1_0/handlers/tests/test_query_handler.py b/acapy_agent/protocols/discovery/v1_0/handlers/tests/test_query_handler.py index 9e428c958a..8251d4a963 100644 --- a/acapy_agent/protocols/discovery/v1_0/handlers/tests/test_query_handler.py +++ b/acapy_agent/protocols/discovery/v1_0/handlers/tests/test_query_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ......core.protocol_registry import ProtocolRegistry from ......messaging.request_context import RequestContext @@ -13,7 +14,7 @@ TEST_MESSAGE_TYPE = TEST_MESSAGE_FAMILY + "/message" -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): ctx = RequestContext.test_context(await create_test_profile()) registry = ProtocolRegistry() diff --git a/acapy_agent/protocols/discovery/v2_0/handlers/tests/test_disclosures_handler.py 
b/acapy_agent/protocols/discovery/v2_0/handlers/tests/test_disclosures_handler.py index 11bf227dae..5d1d8ba527 100644 --- a/acapy_agent/protocols/discovery/v2_0/handlers/tests/test_disclosures_handler.py +++ b/acapy_agent/protocols/discovery/v2_0/handlers/tests/test_disclosures_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ......core.protocol_registry import ProtocolRegistry from ......messaging.base_handler import HandlerException @@ -17,7 +18,7 @@ TEST_MESSAGE_TYPE = TEST_MESSAGE_FAMILY + "/message" -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): ctx = RequestContext.test_context(await create_test_profile()) ctx.connection_ready = True diff --git a/acapy_agent/protocols/discovery/v2_0/handlers/tests/test_queries_handler.py b/acapy_agent/protocols/discovery/v2_0/handlers/tests/test_queries_handler.py index b988702070..e81b52e9d3 100644 --- a/acapy_agent/protocols/discovery/v2_0/handlers/tests/test_queries_handler.py +++ b/acapy_agent/protocols/discovery/v2_0/handlers/tests/test_queries_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ......core.goal_code_registry import GoalCodeRegistry from ......core.protocol_registry import ProtocolRegistry @@ -25,7 +26,7 @@ TEST_MESSAGE_TYPE = TEST_MESSAGE_FAMILY + "/message" -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): ctx = RequestContext.test_context(await create_test_profile()) protocol_registry = ProtocolRegistry() diff --git a/acapy_agent/protocols/out_of_band/v1_0/handlers/tests/test_problem_report_handler.py b/acapy_agent/protocols/out_of_band/v1_0/handlers/tests/test_problem_report_handler.py index 2b4bb079d7..fe97e44884 100644 --- a/acapy_agent/protocols/out_of_band/v1_0/handlers/tests/test_problem_report_handler.py +++ b/acapy_agent/protocols/out_of_band/v1_0/handlers/tests/test_problem_report_handler.py @@ -1,6 +1,7 @@ """Test Problem Report Handler.""" import pytest +import pytest_asyncio from 
......connections.models.conn_record import ConnRecord from ......messaging.request_context import RequestContext @@ -13,14 +14,14 @@ from ...messages.problem_report import OOBProblemReport, ProblemReportReason -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): ctx = RequestContext.test_context(await create_test_profile()) ctx.message_receipt = MessageReceipt() yield ctx -@pytest.fixture() +@pytest_asyncio.fixture async def connection_record(request_context, session): record = ConnRecord() request_context.connection_record = record @@ -28,7 +29,7 @@ async def connection_record(request_context, session): yield record -@pytest.fixture() +@pytest_asyncio.fixture async def session(request_context): yield await request_context.session() diff --git a/acapy_agent/protocols/out_of_band/v1_0/handlers/tests/test_reuse_accept_handler.py b/acapy_agent/protocols/out_of_band/v1_0/handlers/tests/test_reuse_accept_handler.py index 26b79bd588..b7d6d9966b 100644 --- a/acapy_agent/protocols/out_of_band/v1_0/handlers/tests/test_reuse_accept_handler.py +++ b/acapy_agent/protocols/out_of_band/v1_0/handlers/tests/test_reuse_accept_handler.py @@ -1,6 +1,7 @@ """Test Reuse Accept Message Handler.""" import pytest +import pytest_asyncio from ......connections.models.conn_record import ConnRecord from ......messaging.request_context import RequestContext @@ -13,14 +14,14 @@ from ...messages.reuse_accept import HandshakeReuseAccept -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): ctx = RequestContext.test_context(await create_test_profile()) ctx.message_receipt = MessageReceipt() yield ctx -@pytest.fixture() +@pytest_asyncio.fixture async def connection_record(request_context, session): record = ConnRecord() request_context.connection_record = record @@ -28,7 +29,7 @@ async def connection_record(request_context, session): yield record -@pytest.fixture() +@pytest_asyncio.fixture async def session(request_context): yield await 
request_context.session() diff --git a/acapy_agent/protocols/out_of_band/v1_0/handlers/tests/test_reuse_handler.py b/acapy_agent/protocols/out_of_band/v1_0/handlers/tests/test_reuse_handler.py index 808e6e9995..dbb17ce887 100644 --- a/acapy_agent/protocols/out_of_band/v1_0/handlers/tests/test_reuse_handler.py +++ b/acapy_agent/protocols/out_of_band/v1_0/handlers/tests/test_reuse_handler.py @@ -3,6 +3,7 @@ from typing import AsyncGenerator import pytest +import pytest_asyncio from ......connections.models.conn_record import ConnRecord from ......core.profile import ProfileSession @@ -17,14 +18,14 @@ from ...messages.reuse_accept import HandshakeReuseAccept -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): ctx = RequestContext.test_context(await create_test_profile()) ctx.message_receipt = MessageReceipt() yield ctx -@pytest.fixture() +@pytest_asyncio.fixture async def session(request_context) -> AsyncGenerator[ProfileSession, None]: yield await request_context.session() diff --git a/acapy_agent/protocols/out_of_band/v1_0/models/tests/test_out_of_band.py b/acapy_agent/protocols/out_of_band/v1_0/models/tests/test_out_of_band.py index 67922fb121..484ee89580 100644 --- a/acapy_agent/protocols/out_of_band/v1_0/models/tests/test_out_of_band.py +++ b/acapy_agent/protocols/out_of_band/v1_0/models/tests/test_out_of_band.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ......core.profile import ProfileSession from ......utils.testing import create_test_profile @@ -6,7 +7,7 @@ from ..oob_record import OobRecord -@pytest.fixture() +@pytest_asyncio.fixture async def session(): profile = await create_test_profile() async with profile.session() as session: diff --git a/acapy_agent/protocols/problem_report/v1_0/tests/test_handler.py b/acapy_agent/protocols/problem_report/v1_0/tests/test_handler.py index aaf745d212..ec7a402a6a 100644 --- a/acapy_agent/protocols/problem_report/v1_0/tests/test_handler.py +++ 
b/acapy_agent/protocols/problem_report/v1_0/tests/test_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from .....core.event_bus import EventBus, MockEventBus from .....messaging.request_context import RequestContext @@ -9,7 +10,7 @@ from ..message import ProblemReport -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): yield RequestContext.test_context(await create_test_profile()) diff --git a/acapy_agent/protocols/revocation_notification/v1_0/handlers/tests/test_revoke_handler.py b/acapy_agent/protocols/revocation_notification/v1_0/handlers/tests/test_revoke_handler.py index 3df1ef3b1d..9a3d3d12fb 100644 --- a/acapy_agent/protocols/revocation_notification/v1_0/handlers/tests/test_revoke_handler.py +++ b/acapy_agent/protocols/revocation_notification/v1_0/handlers/tests/test_revoke_handler.py @@ -3,6 +3,7 @@ from typing import Generator import pytest +import pytest_asyncio from ......core.event_bus import EventBus, MockEventBus from ......core.profile import Profile @@ -23,7 +24,7 @@ def responder(): yield MockResponder() -@pytest.fixture +@pytest_asyncio.fixture async def profile(event_bus): profile = await create_test_profile() profile.context.injector.bind_instance(EventBus, event_bus) diff --git a/acapy_agent/protocols/revocation_notification/v1_0/models/tests/test_rev_notification_record.py b/acapy_agent/protocols/revocation_notification/v1_0/models/tests/test_rev_notification_record.py index b415417899..88aad2a4a7 100644 --- a/acapy_agent/protocols/revocation_notification/v1_0/models/tests/test_rev_notification_record.py +++ b/acapy_agent/protocols/revocation_notification/v1_0/models/tests/test_rev_notification_record.py @@ -1,6 +1,7 @@ """Test RevNotificationRecord.""" import pytest +import pytest_asyncio from ......storage.error import StorageDuplicateError, StorageNotFoundError from ......utils.testing import create_test_profile @@ -8,7 +9,7 @@ from ..rev_notification_record import RevNotificationRecord -@pytest.fixture 
+@pytest_asyncio.fixture async def profile(): profile = await create_test_profile() yield profile diff --git a/acapy_agent/protocols/revocation_notification/v1_0/tests/test_routes.py b/acapy_agent/protocols/revocation_notification/v1_0/tests/test_routes.py index 53f0f0341b..0e2c0cb98a 100644 --- a/acapy_agent/protocols/revocation_notification/v1_0/tests/test_routes.py +++ b/acapy_agent/protocols/revocation_notification/v1_0/tests/test_routes.py @@ -1,6 +1,7 @@ """Test routes.py""" import pytest +import pytest_asyncio from .....config.settings import Settings from .....core.event_bus import Event, MockEventBus @@ -25,7 +26,7 @@ def responder(): yield MockResponder() -@pytest.fixture +@pytest_asyncio.fixture async def profile(responder): profile = await create_test_profile() profile.context.injector.bind_instance(BaseResponder, responder) diff --git a/acapy_agent/protocols/revocation_notification/v2_0/handlers/tests/test_revoke_handler.py b/acapy_agent/protocols/revocation_notification/v2_0/handlers/tests/test_revoke_handler.py index 791a9146d6..96b44a8b5a 100644 --- a/acapy_agent/protocols/revocation_notification/v2_0/handlers/tests/test_revoke_handler.py +++ b/acapy_agent/protocols/revocation_notification/v2_0/handlers/tests/test_revoke_handler.py @@ -3,6 +3,7 @@ from typing import Generator import pytest +import pytest_asyncio from ......core.event_bus import EventBus, MockEventBus from ......core.profile import Profile @@ -23,7 +24,7 @@ def responder(): yield MockResponder() -@pytest.fixture +@pytest_asyncio.fixture async def profile(event_bus): profile = await create_test_profile() profile.context.injector.bind_instance(EventBus, event_bus) diff --git a/acapy_agent/protocols/revocation_notification/v2_0/models/tests/test_rev_notification_record.py b/acapy_agent/protocols/revocation_notification/v2_0/models/tests/test_rev_notification_record.py index 2cc61a84a0..3440063fb6 100644 --- 
a/acapy_agent/protocols/revocation_notification/v2_0/models/tests/test_rev_notification_record.py +++ b/acapy_agent/protocols/revocation_notification/v2_0/models/tests/test_rev_notification_record.py @@ -1,6 +1,7 @@ """Test RevNotificationRecord.""" import pytest +import pytest_asyncio from ......storage.error import StorageDuplicateError, StorageNotFoundError from ......utils.testing import create_test_profile @@ -8,7 +9,7 @@ from ..rev_notification_record import RevNotificationRecord -@pytest.fixture +@pytest_asyncio.fixture async def profile(): profile = await create_test_profile() yield profile diff --git a/acapy_agent/protocols/revocation_notification/v2_0/tests/test_routes.py b/acapy_agent/protocols/revocation_notification/v2_0/tests/test_routes.py index 1f1915fc68..b4dec48f7f 100644 --- a/acapy_agent/protocols/revocation_notification/v2_0/tests/test_routes.py +++ b/acapy_agent/protocols/revocation_notification/v2_0/tests/test_routes.py @@ -1,6 +1,7 @@ """Test routes.py""" import pytest +import pytest_asyncio from .....config.settings import Settings from .....core.event_bus import Event, MockEventBus @@ -23,7 +24,7 @@ def responder(): yield MockResponder() -@pytest.fixture +@pytest_asyncio.fixture async def profile(responder): profile = await create_test_profile() profile.context.injector.bind_instance(BaseResponder, responder) diff --git a/acapy_agent/protocols/trustping/v1_0/handlers/tests/test_ping_handler.py b/acapy_agent/protocols/trustping/v1_0/handlers/tests/test_ping_handler.py index d18831857a..5aa4719fa7 100644 --- a/acapy_agent/protocols/trustping/v1_0/handlers/tests/test_ping_handler.py +++ b/acapy_agent/protocols/trustping/v1_0/handlers/tests/test_ping_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ......messaging.request_context import RequestContext from ......messaging.responder import MockResponder @@ -9,7 +10,7 @@ from ...messages.ping_response import PingResponse -@pytest.fixture() +@pytest_asyncio.fixture async def 
request_context(): yield RequestContext.test_context(await create_test_profile()) diff --git a/acapy_agent/protocols/trustping/v1_0/handlers/tests/test_ping_response_handler.py b/acapy_agent/protocols/trustping/v1_0/handlers/tests/test_ping_response_handler.py index 9cb91d4055..04bc6ff18f 100644 --- a/acapy_agent/protocols/trustping/v1_0/handlers/tests/test_ping_response_handler.py +++ b/acapy_agent/protocols/trustping/v1_0/handlers/tests/test_ping_response_handler.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ......messaging.request_context import RequestContext from ......messaging.responder import MockResponder @@ -8,7 +9,7 @@ from ...messages.ping_response import PingResponse -@pytest.fixture() +@pytest_asyncio.fixture async def request_context(): yield RequestContext.test_context(await create_test_profile()) diff --git a/acapy_agent/resolver/default/tests/test_indy.py b/acapy_agent/resolver/default/tests/test_indy.py index a2d89b8a4a..2624980864 100644 --- a/acapy_agent/resolver/default/tests/test_indy.py +++ b/acapy_agent/resolver/default/tests/test_indy.py @@ -1,6 +1,7 @@ """Test IndyDIDResolver.""" import pytest +import pytest_asyncio from ....core.profile import Profile from ....ledger.base import BaseLedger @@ -38,7 +39,7 @@ def ledger(): yield ledger -@pytest.fixture +@pytest_asyncio.fixture async def profile(ledger): """Profile fixture.""" profile = await create_test_profile() diff --git a/acapy_agent/resolver/default/tests/test_jwk.py b/acapy_agent/resolver/default/tests/test_jwk.py index 318418f490..49d31bd3c1 100644 --- a/acapy_agent/resolver/default/tests/test_jwk.py +++ b/acapy_agent/resolver/default/tests/test_jwk.py @@ -1,6 +1,7 @@ """Test JwkDIDResolver.""" import pytest +import pytest_asyncio from ....core.profile import Profile from ....utils.testing import create_test_profile @@ -22,7 +23,7 @@ def resolver(): yield JwkDIDResolver() -@pytest.fixture +@pytest_asyncio.fixture async def profile(): """Profile fixture.""" profile = 
await create_test_profile() diff --git a/acapy_agent/resolver/default/tests/test_key.py b/acapy_agent/resolver/default/tests/test_key.py index dcf6dbb31c..83352661a4 100644 --- a/acapy_agent/resolver/default/tests/test_key.py +++ b/acapy_agent/resolver/default/tests/test_key.py @@ -1,6 +1,7 @@ """Test KeyDIDResolver.""" import pytest +import pytest_asyncio from ....core.profile import Profile from ....messaging.valid import DIDKey @@ -19,7 +20,7 @@ def resolver(): yield KeyDIDResolver() -@pytest.fixture +@pytest_asyncio.fixture async def profile(): """Profile fixture.""" profile = await create_test_profile() diff --git a/acapy_agent/resolver/default/tests/test_legacy_peer.py b/acapy_agent/resolver/default/tests/test_legacy_peer.py index 46f18caf8a..7dcc833e5d 100644 --- a/acapy_agent/resolver/default/tests/test_legacy_peer.py +++ b/acapy_agent/resolver/default/tests/test_legacy_peer.py @@ -2,6 +2,7 @@ import pydid import pytest +import pytest_asyncio from ....cache.base import BaseCache from ....cache.in_memory import InMemoryCache @@ -24,7 +25,7 @@ def resolver(): yield LegacyPeerDIDResolver() -@pytest.fixture +@pytest_asyncio.fixture async def profile(): """Profile fixture.""" profile = await create_test_profile() diff --git a/acapy_agent/resolver/default/tests/test_peer2.py b/acapy_agent/resolver/default/tests/test_peer2.py index 37ec3b4885..d4b6945aa5 100644 --- a/acapy_agent/resolver/default/tests/test_peer2.py +++ b/acapy_agent/resolver/default/tests/test_peer2.py @@ -1,6 +1,7 @@ """Test PeerDIDResolver.""" import pytest +import pytest_asyncio from ....core.profile import Profile from ....utils.testing import create_test_profile @@ -15,7 +16,7 @@ def resolver(): yield PeerDID2Resolver() -@pytest.fixture +@pytest_asyncio.fixture async def profile(): """Profile fixture.""" profile = await create_test_profile() diff --git a/acapy_agent/resolver/default/tests/test_peer3.py b/acapy_agent/resolver/default/tests/test_peer3.py index 7303478c35..a6e030e9fc 100644 --- 
a/acapy_agent/resolver/default/tests/test_peer3.py +++ b/acapy_agent/resolver/default/tests/test_peer3.py @@ -1,6 +1,7 @@ """Test PeerDIDResolver.""" import pytest +import pytest_asyncio from did_peer_2 import peer2to3 from ....connections.models.conn_record import ConnRecord @@ -21,7 +22,7 @@ def event_bus(): yield EventBus() -@pytest.fixture +@pytest_asyncio.fixture async def profile(event_bus: EventBus): """Profile fixture.""" profile = await create_test_profile() @@ -29,7 +30,7 @@ async def profile(event_bus: EventBus): yield profile -@pytest.fixture +@pytest_asyncio.fixture async def resolver(profile): """Resolver fixture.""" instance = PeerDID3Resolver() diff --git a/acapy_agent/resolver/default/tests/test_peer4.py b/acapy_agent/resolver/default/tests/test_peer4.py index d8001e7082..a6c1a3d80e 100644 --- a/acapy_agent/resolver/default/tests/test_peer4.py +++ b/acapy_agent/resolver/default/tests/test_peer4.py @@ -1,6 +1,7 @@ """Test PeerDIDResolver.""" import pytest +import pytest_asyncio from ....core.event_bus import EventBus from ....core.profile import Profile @@ -18,7 +19,7 @@ def event_bus(): yield EventBus() -@pytest.fixture +@pytest_asyncio.fixture async def profile(event_bus: EventBus): """Profile fixture.""" profile = await create_test_profile() @@ -26,7 +27,7 @@ async def profile(event_bus: EventBus): yield profile -@pytest.fixture +@pytest_asyncio.fixture async def resolver(profile): """Resolver fixture.""" instance = PeerDID4Resolver() diff --git a/acapy_agent/resolver/default/tests/test_universal.py b/acapy_agent/resolver/default/tests/test_universal.py index c516ac7068..51a721dc22 100644 --- a/acapy_agent/resolver/default/tests/test_universal.py +++ b/acapy_agent/resolver/default/tests/test_universal.py @@ -4,6 +4,7 @@ from typing import Dict, Optional, Union import pytest +import pytest_asyncio from ....config.settings import Settings from ....tests import mock @@ -13,7 +14,7 @@ from ..universal import UniversalResolver -@pytest.fixture 
+@pytest_asyncio.fixture async def resolver(): """Resolver fixture.""" yield UniversalResolver( @@ -21,7 +22,7 @@ async def resolver(): ) -@pytest.fixture +@pytest_asyncio.fixture async def profile(): """Profile fixture.""" profile = await create_test_profile() diff --git a/acapy_agent/resolver/default/tests/test_webvh.py b/acapy_agent/resolver/default/tests/test_webvh.py index 878843efc1..f000543c78 100644 --- a/acapy_agent/resolver/default/tests/test_webvh.py +++ b/acapy_agent/resolver/default/tests/test_webvh.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ....core.profile import Profile from ....messaging.valid import DIDWebvh @@ -14,7 +15,7 @@ def resolver(): yield WebvhDIDResolver() -@pytest.fixture +@pytest_asyncio.fixture async def profile(): """Profile fixture.""" yield await create_test_profile() diff --git a/acapy_agent/resolver/tests/test_did_resolver.py b/acapy_agent/resolver/tests/test_did_resolver.py index 0d3a12fee9..da46d9c680 100644 --- a/acapy_agent/resolver/tests/test_did_resolver.py +++ b/acapy_agent/resolver/tests/test_did_resolver.py @@ -4,6 +4,7 @@ from typing import Pattern import pytest +import pytest_asyncio from pydid import DID, BasicDIDDocument, DIDDocument, VerificationMethod from ...utils.testing import create_test_profile @@ -93,7 +94,7 @@ def resolver(): return DIDResolver(did_resolver_registry) -@pytest.fixture +@pytest_asyncio.fixture async def profile(): profile = await create_test_profile() yield profile diff --git a/acapy_agent/resolver/tests/test_routes.py b/acapy_agent/resolver/tests/test_routes.py index 347fc685d5..bda5228e20 100644 --- a/acapy_agent/resolver/tests/test_routes.py +++ b/acapy_agent/resolver/tests/test_routes.py @@ -3,6 +3,7 @@ # pylint: disable=redefined-outer-name import pytest +import pytest_asyncio from pydid import DIDDocument from ...admin.request_context import AdminRequestContext @@ -56,7 +57,7 @@ def mock_resolver(resolution_result): yield did_resolver -@pytest.fixture 
+@pytest_asyncio.fixture async def profile(): profile = await create_test_profile( settings={ diff --git a/acapy_agent/settings/tests/test_routes.py b/acapy_agent/settings/tests/test_routes.py index 707aa6760a..7e46e506e0 100644 --- a/acapy_agent/settings/tests/test_routes.py +++ b/acapy_agent/settings/tests/test_routes.py @@ -3,6 +3,7 @@ # pylint: disable=redefined-outer-name import pytest +import pytest_asyncio from ...admin.request_context import AdminRequestContext from ...multitenant.base import BaseMultitenantManager @@ -21,13 +22,13 @@ def mock_response(): test_module.web.json_response = temp_value -@pytest.fixture +@pytest_asyncio.fixture async def profile(): profile = await create_test_profile() yield profile -@pytest.fixture +@pytest_asyncio.fixture async def admin_profile(): profile = await create_test_profile( settings={ diff --git a/acapy_agent/storage/tests/test_askar_storage.py b/acapy_agent/storage/tests/test_askar_storage.py index c959c6e3fa..caf24c6daf 100644 --- a/acapy_agent/storage/tests/test_askar_storage.py +++ b/acapy_agent/storage/tests/test_askar_storage.py @@ -3,6 +3,7 @@ from unittest import IsolatedAsyncioTestCase import pytest +import pytest_asyncio from ...askar.profile import AskarProfileManager from ...config.injection_context import InjectionContext @@ -15,7 +16,7 @@ from ..record import StorageRecord -@pytest.fixture() +@pytest_asyncio.fixture async def store(): context = InjectionContext() profile = await AskarProfileManager().provision( diff --git a/acapy_agent/storage/vc_holder/tests/test_askar_vc_holder.py b/acapy_agent/storage/vc_holder/tests/test_askar_vc_holder.py index 23be81084b..d9732b106c 100644 --- a/acapy_agent/storage/vc_holder/tests/test_askar_vc_holder.py +++ b/acapy_agent/storage/vc_holder/tests/test_askar_vc_holder.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio from ....storage.error import StorageDuplicateError, StorageNotFoundError from ....utils.testing import create_test_profile @@ -14,7 +15,7 @@ 
VC_GIVEN_ID = "http://example.edu/credentials/3732" -@pytest.fixture() +@pytest_asyncio.fixture async def holder(): profile = await create_test_profile(settings={"wallet.type": "askar"}) yield profile.inject(VCHolder) diff --git a/acapy_agent/vc/vc_di/tests/test_manager.py b/acapy_agent/vc/vc_di/tests/test_manager.py index 757ada3808..02c56b5ade 100644 --- a/acapy_agent/vc/vc_di/tests/test_manager.py +++ b/acapy_agent/vc/vc_di/tests/test_manager.py @@ -1,6 +1,7 @@ """Test VCDIManager.""" import pytest +import pytest_asyncio from anoncreds import W3cPresentation from ....anoncreds.registry import AnonCredsRegistry @@ -145,7 +146,7 @@ } -@pytest.fixture +@pytest_asyncio.fixture async def profile(): profile = await create_test_profile() profile.context.injector.bind_instance(DIDResolver, DIDResolver([KeyDIDResolver()])) diff --git a/acapy_agent/vc/vc_di/tests/test_prove.py b/acapy_agent/vc/vc_di/tests/test_prove.py index a9e8a92ab2..1961715319 100644 --- a/acapy_agent/vc/vc_di/tests/test_prove.py +++ b/acapy_agent/vc/vc_di/tests/test_prove.py @@ -1,6 +1,7 @@ """test prove.py""" import pytest +import pytest_asyncio from anoncreds import CredentialRevocationState, RevocationStatusList from ....anoncreds.holder import AnonCredsHolder, AnonCredsHolderError @@ -33,7 +34,7 @@ def resolver(): yield DIDResolver([KeyDIDResolver()]) -@pytest.fixture +@pytest_asyncio.fixture async def profile(resolver: DIDResolver): profile = await create_test_profile() profile.context.injector.bind_instance(DIDMethods, DIDMethods()) From 8ea254296692ee96c767847a18b4330c2423d519 Mon Sep 17 00:00:00 2001 From: Mourits de Beer <31511766+ff137@users.noreply.github.com> Date: Wed, 16 Apr 2025 19:12:20 +0200 Subject: [PATCH 10/32] :art: Add type hints to anoncreds module (#3652) * :art: Add type hints Signed-off-by: ff137 * :art: Add todo comments. 
#3651 Signed-off-by: ff137 * :art: Add return types Signed-off-by: ff137 --------- Signed-off-by: ff137 --- acapy_agent/anoncreds/__init__.py | 2 +- acapy_agent/anoncreds/base.py | 6 ++-- .../anoncreds/default/did_indy/registry.py | 2 +- .../anoncreds/default/did_web/registry.py | 4 +-- .../anoncreds/default/legacy_indy/author.py | 9 +++-- .../anoncreds/default/legacy_indy/recover.py | 2 +- .../anoncreds/default/legacy_indy/registry.py | 2 +- acapy_agent/anoncreds/holder.py | 8 ++--- acapy_agent/anoncreds/issuer.py | 10 +++--- acapy_agent/anoncreds/models/credential.py | 2 +- .../anoncreds/models/credential_definition.py | 2 +- .../anoncreds/models/credential_proposal.py | 4 ++- .../anoncreds/models/presentation_request.py | 2 +- acapy_agent/anoncreds/models/revocation.py | 10 +++--- acapy_agent/anoncreds/models/schema.py | 2 +- acapy_agent/anoncreds/models/utils.py | 4 +-- acapy_agent/anoncreds/registry.py | 2 +- acapy_agent/anoncreds/revocation.py | 36 ++++++++++--------- acapy_agent/anoncreds/revocation_setup.py | 10 +++--- acapy_agent/anoncreds/routes.py | 6 ++-- acapy_agent/anoncreds/util.py | 2 +- acapy_agent/anoncreds/verifier.py | 14 ++++---- acapy_agent/vc/vc_di/prove.py | 10 +++--- 23 files changed, 80 insertions(+), 71 deletions(-) diff --git a/acapy_agent/anoncreds/__init__.py b/acapy_agent/anoncreds/__init__.py index c8c2f8ecaa..6585b1bb08 100644 --- a/acapy_agent/anoncreds/__init__.py +++ b/acapy_agent/anoncreds/__init__.py @@ -7,7 +7,7 @@ LOGGER = logging.getLogger(__name__) -async def setup(context: InjectionContext): +async def setup(context: InjectionContext) -> None: """Set up default resolvers.""" registry = context.inject_or(AnonCredsRegistry) if not registry: diff --git a/acapy_agent/anoncreds/base.py b/acapy_agent/anoncreds/base.py index a1e4cd7a85..6474dee556 100644 --- a/acapy_agent/anoncreds/base.py +++ b/acapy_agent/anoncreds/base.py @@ -66,7 +66,7 @@ def __init__( self.obj = obj @property - def message(self): + def message(self) -> str: 
"""Message.""" return f"{self._message}: {self.obj_id}, {self.obj}" @@ -75,12 +75,12 @@ class AnonCredsSchemaAlreadyExists(AnonCredsObjectAlreadyExists[AnonCredsSchema] """Raised when a schema already exists.""" @property - def schema_id(self): + def schema_id(self) -> str: """Get Schema Id.""" return self.obj_id @property - def schema(self): + def schema(self) -> AnonCredsSchema: """Get Schema.""" return self.obj diff --git a/acapy_agent/anoncreds/default/did_indy/registry.py b/acapy_agent/anoncreds/default/did_indy/registry.py index 23c6d6e014..e49325ea3f 100644 --- a/acapy_agent/anoncreds/default/did_indy/registry.py +++ b/acapy_agent/anoncreds/default/did_indy/registry.py @@ -40,7 +40,7 @@ def supported_identifiers_regex(self) -> Pattern: return self._supported_identifiers_regex # TODO: fix regex (too general) - async def setup(self, context: InjectionContext): + async def setup(self, context: InjectionContext) -> None: """Setup.""" LOGGER.info("Successfully registered DIDIndyRegistry") diff --git a/acapy_agent/anoncreds/default/did_web/registry.py b/acapy_agent/anoncreds/default/did_web/registry.py index 5a432096ab..c3dfdfa484 100644 --- a/acapy_agent/anoncreds/default/did_web/registry.py +++ b/acapy_agent/anoncreds/default/did_web/registry.py @@ -42,11 +42,11 @@ def supported_identifiers_regex(self) -> Pattern: return self._supported_identifiers_regex # TODO: fix regex (too general) - async def setup(self, context: InjectionContext): + async def setup(self, context: InjectionContext) -> None: """Setup.""" LOGGER.info("Successfully registered DIDWebRegistry") - async def get_schema(self, profile, schema_id: str) -> GetSchemaResult: + async def get_schema(self, profile: Profile, schema_id: str) -> GetSchemaResult: """Get a schema from the registry.""" raise NotImplementedError() diff --git a/acapy_agent/anoncreds/default/legacy_indy/author.py b/acapy_agent/anoncreds/default/legacy_indy/author.py index dc8bd829a7..50aeb28173 100644 --- 
a/acapy_agent/anoncreds/default/legacy_indy/author.py +++ b/acapy_agent/anoncreds/default/legacy_indy/author.py @@ -5,12 +5,15 @@ from aiohttp import web from acapy_agent.connections.models.conn_record import ConnRecord +from acapy_agent.core.profile import Profile from acapy_agent.messaging.models.base import BaseModelError from acapy_agent.protocols.endorse_transaction.v1_0.util import get_endorser_connection_id from acapy_agent.storage.error import StorageNotFoundError -async def get_endorser_info(profile, options: Optional[dict] = None): +async def get_endorser_info( + profile: Profile, options: Optional[dict] = None +) -> tuple[str, str]: """Gets the endorser did for the current transaction.""" options = options or {} endorser_connection_id = options.get("endorser_connection_id", None) @@ -43,8 +46,8 @@ async def get_endorser_info(profile, options: Optional[dict] = None): if "endorser_did" not in endorser_info.keys(): raise web.HTTPForbidden( reason=( - ' "endorser_did" is not set in "endorser_info"' - " in connection metadata for this connection record" + '"endorser_did" is not set in "endorser_info" ' + "in connection metadata for this connection record" ) ) diff --git a/acapy_agent/anoncreds/default/legacy_indy/recover.py b/acapy_agent/anoncreds/default/legacy_indy/recover.py index 4c3eaf0517..65a725ceae 100644 --- a/acapy_agent/anoncreds/default/legacy_indy/recover.py +++ b/acapy_agent/anoncreds/default/legacy_indy/recover.py @@ -90,7 +90,7 @@ async def fetch_txns( return registry_from_ledger, revoked -async def generate_ledger_rrrecovery_txn(genesis_txns: str, rev_list: RevList): +async def generate_ledger_rrrecovery_txn(genesis_txns: str, rev_list: RevList) -> dict: """Generate a new ledger accum entry, using the wallet value if revocations ahead of ledger.""" # noqa: E501 registry_from_ledger, prev_revoked = await fetch_txns( diff --git a/acapy_agent/anoncreds/default/legacy_indy/registry.py b/acapy_agent/anoncreds/default/legacy_indy/registry.py index 
5e41d3a263..0af07c5b81 100644 --- a/acapy_agent/anoncreds/default/legacy_indy/registry.py +++ b/acapy_agent/anoncreds/default/legacy_indy/registry.py @@ -138,7 +138,7 @@ def supported_identifiers_regex(self) -> Pattern: """Supported Identifiers Regular Expression.""" return self._supported_identifiers_regex - async def setup(self, context: InjectionContext): + async def setup(self, context: InjectionContext) -> None: """Setup.""" LOGGER.info("Successfully registered LegacyIndyRegistry") diff --git a/acapy_agent/anoncreds/holder.py b/acapy_agent/anoncreds/holder.py index 79f2e58754..c7fac1e221 100644 --- a/acapy_agent/anoncreds/holder.py +++ b/acapy_agent/anoncreds/holder.py @@ -41,7 +41,7 @@ CATEGORY_MASTER_SECRET = "master_secret" -def _make_cred_info(cred_id, cred: Credential): +def _make_cred_info(cred_id: str, cred: Credential) -> dict: cred_info = cred.to_dict() # not secure! rev_info = cred_info["signature"]["r_credential"] return { @@ -372,7 +372,7 @@ async def store_credential_w3c( return credential_id - async def get_credentials(self, *, offset: int, limit: int, wql: dict): + async def get_credentials(self, *, offset: int, limit: int, wql: dict) -> list[dict]: """Get credentials stored in the wallet. Args: @@ -410,7 +410,7 @@ async def get_credentials_for_presentation_request_by_referent( offset: int, limit: int, extra_query: Optional[dict] = None, - ): + ) -> list: """Get credentials stored in the wallet. Args: @@ -543,7 +543,7 @@ async def credential_revoked( return cred.rev_reg_index in set_revoked - async def delete_credential(self, credential_id: str): + async def delete_credential(self, credential_id: str) -> None: """Remove a credential stored in the wallet. 
Args: diff --git a/acapy_agent/anoncreds/issuer.py b/acapy_agent/anoncreds/issuer.py index 7f7e7b93a1..5bbcbb8f16 100644 --- a/acapy_agent/anoncreds/issuer.py +++ b/acapy_agent/anoncreds/issuer.py @@ -98,7 +98,7 @@ def profile(self) -> AskarAnonCredsProfile: return self._profile - async def notify(self, event: Event): + async def notify(self, event: Event) -> None: """Accessor for the event bus instance.""" event_bus = self.profile.inject(EventBus) await event_bus.notify(self._profile, event) @@ -134,7 +134,7 @@ async def _finish_registration( async def store_schema( self, result: SchemaResult, - ): + ) -> None: """Store schema after reaching finished state.""" identifier = result.job_id or result.schema_state.schema_id if not identifier: @@ -234,7 +234,7 @@ async def create_and_register_schema( except (AnoncredsError, BaseAnonCredsError) as err: raise AnonCredsIssuerError("Error creating schema") from err - async def finish_schema(self, job_id: str, schema_id: str): + async def finish_schema(self, job_id: str, schema_id: str) -> None: """Mark a schema as finished.""" async with self.profile.transaction() as txn: await self._finish_registration(txn, CATEGORY_SCHEMA, job_id, schema_id) @@ -386,7 +386,7 @@ async def store_credential_definition( support_revocation: bool, max_cred_num: int, options: Optional[dict] = None, - ): + ) -> None: """Store the cred def and it's components in the wallet.""" options = options or {} identifier = ( @@ -443,7 +443,7 @@ async def store_credential_definition( async def finish_cred_def( self, job_id: str, cred_def_id: str, options: Optional[dict] = None - ): + ) -> None: """Finish a cred def.""" async with self.profile.transaction() as txn: entry = await self._finish_registration( diff --git a/acapy_agent/anoncreds/models/credential.py b/acapy_agent/anoncreds/models/credential.py index 1b5d3063cd..6458518c88 100644 --- a/acapy_agent/anoncreds/models/credential.py +++ b/acapy_agent/anoncreds/models/credential.py @@ -65,7 +65,7 @@ def 
__init__(self, **kwargs): **kwargs, ) - def _deserialize(self, value, attr, data, **kwargs): + def _deserialize(self, value: dict, attr: str, data: dict, **kwargs) -> dict: """Deserialize dict with anoncreds attribute value.""" if not isinstance(value, dict): raise ValidationError("Value must be a dict.") diff --git a/acapy_agent/anoncreds/models/credential_definition.py b/acapy_agent/anoncreds/models/credential_definition.py index f2961b8b9a..d660963b4e 100644 --- a/acapy_agent/anoncreds/models/credential_definition.py +++ b/acapy_agent/anoncreds/models/credential_definition.py @@ -239,7 +239,7 @@ def from_native(cls, cred_def: CredentialDefinition): """Convert a native credential definition to a CredDef object.""" return cls.deserialize(cred_def.to_json()) - def to_native(self): + def to_native(self) -> CredentialDefinition: """Convert to native anoncreds credential definition.""" return CredentialDefinition.load(self.serialize()) diff --git a/acapy_agent/anoncreds/models/credential_proposal.py b/acapy_agent/anoncreds/models/credential_proposal.py index cac1ec027a..7ab9a66853 100644 --- a/acapy_agent/anoncreds/models/credential_proposal.py +++ b/acapy_agent/anoncreds/models/credential_proposal.py @@ -76,7 +76,9 @@ class AnonCredsCredentialDefinitionProposal(OpenAPISchema): EVENT_LISTENER_PATTERN = re.compile(f"^{CRED_DEF_EVENT_PREFIX}(.*)?$") -async def notify_cred_def_event(profile: Profile, cred_def_id: str, meta_data: dict): +async def notify_cred_def_event( + profile: Profile, cred_def_id: str, meta_data: dict +) -> None: """Send notification for a cred def post-process event.""" await profile.notify( CRED_DEF_EVENT_PREFIX + cred_def_id, diff --git a/acapy_agent/anoncreds/models/presentation_request.py b/acapy_agent/anoncreds/models/presentation_request.py index aa9df77996..1ecfcb1515 100644 --- a/acapy_agent/anoncreds/models/presentation_request.py +++ b/acapy_agent/anoncreds/models/presentation_request.py @@ -163,7 +163,7 @@ class 
AnonCredsPresentationReqAttrSpecSchema(OpenAPISchema): ) @validates_schema - def validate_fields(self, data, **kwargs): + def validate_fields(self, data: dict, **kwargs) -> None: """Validate schema fields. Data must have exactly one of name or names; if names then restrictions are diff --git a/acapy_agent/anoncreds/models/revocation.py b/acapy_agent/anoncreds/models/revocation.py index 50bbd0e388..c182dc7ecb 100644 --- a/acapy_agent/anoncreds/models/revocation.py +++ b/acapy_agent/anoncreds/models/revocation.py @@ -113,7 +113,7 @@ def from_native(cls, rev_reg_def: RevocationRegistryDefinition): """Convert a native revocation registry definition to a RevRegDef object.""" return cls.deserialize(rev_reg_def.to_json()) - def to_native(self): + def to_native(self) -> RevocationRegistryDefinition: """Convert to native anoncreds revocation registry definition.""" return RevocationRegistryDefinition.load(self.serialize()) @@ -250,12 +250,12 @@ def __init__( ) @property - def rev_reg_def_id(self): + def rev_reg_def_id(self) -> str: """Revocation Registry Definition ID.""" return self.revocation_registry_definition_state.revocation_registry_definition_id @property - def rev_reg_def(self): + def rev_reg_def(self) -> RevRegDef: """Revocation Registry Definition.""" return self.revocation_registry_definition_state.revocation_registry_definition @@ -362,7 +362,7 @@ def from_native(cls, rev_list: RevocationStatusList): """Convert from native revocation list.""" return cls.deserialize(rev_list.to_json()) - def to_native(self): + def to_native(self) -> RevocationStatusList: """Convert to native revocation list.""" return RevocationStatusList.load(self.serialize()) @@ -498,7 +498,7 @@ def __init__( self.revocation_list_metadata = revocation_list_metadata @property - def rev_reg_def_id(self): + def rev_reg_def_id(self) -> str: """Rev reg def id.""" return self.revocation_list_state.revocation_list.rev_reg_def_id diff --git a/acapy_agent/anoncreds/models/schema.py 
b/acapy_agent/anoncreds/models/schema.py index c513b3afd3..ea2b850eda 100644 --- a/acapy_agent/anoncreds/models/schema.py +++ b/acapy_agent/anoncreds/models/schema.py @@ -41,7 +41,7 @@ def from_native(cls, schema: Schema) -> "AnonCredsSchema": """Convert from native object.""" return cls.deserialize(schema.to_dict()) - def to_native(self): + def to_native(self) -> Schema: """Convert to native object.""" return Schema.load(self.serialize()) diff --git a/acapy_agent/anoncreds/models/utils.py b/acapy_agent/anoncreds/models/utils.py index ac5797190c..6d16eaaafa 100644 --- a/acapy_agent/anoncreds/models/utils.py +++ b/acapy_agent/anoncreds/models/utils.py @@ -16,7 +16,7 @@ async def get_requested_creds_from_proof_request_preview( proof_request: dict, *, holder: AnonCredsHolder, -): +) -> dict[str, dict]: """Build anoncreds requested-credentials structure. Given input proof request and presentation preview, use credentials in @@ -81,7 +81,7 @@ async def get_requested_creds_from_proof_request_preview( return req_creds -def extract_non_revocation_intervals_from_proof_request(proof_req: dict): +def extract_non_revocation_intervals_from_proof_request(proof_req: dict) -> dict: """Return non-revocation intervals by requested item referent in proof request.""" non_revoc_intervals = {} for req_item_type in ("requested_attributes", "requested_predicates"): diff --git a/acapy_agent/anoncreds/registry.py b/acapy_agent/anoncreds/registry.py index ae4e9b9afb..cc823d5c1b 100644 --- a/acapy_agent/anoncreds/registry.py +++ b/acapy_agent/anoncreds/registry.py @@ -37,7 +37,7 @@ def __init__(self, registries: Optional[List[BaseAnonCredsHandler]] = None): for registry in registries: self.register(registry) - def register(self, registry: BaseAnonCredsHandler): + def register(self, registry: BaseAnonCredsHandler) -> None: """Register a new registry.""" if isinstance(registry, BaseAnonCredsResolver): self.resolvers.append(registry) diff --git a/acapy_agent/anoncreds/revocation.py 
b/acapy_agent/anoncreds/revocation.py index 8acf23b671..4a6f9f0d5c 100644 --- a/acapy_agent/anoncreds/revocation.py +++ b/acapy_agent/anoncreds/revocation.py @@ -99,7 +99,7 @@ def profile(self) -> AskarAnonCredsProfile: return self._profile - async def notify(self, event: Event): + async def notify(self, event: Event) -> None: """Emit an event on the event bus.""" event_bus = self.profile.inject(EventBus) await event_bus.notify(self.profile, event) @@ -216,7 +216,7 @@ async def store_revocation_registry_definition( result: RevRegDefResult, rev_reg_def_private: RevocationRegistryDefinitionPrivate, options: Optional[dict] = None, - ): + ) -> None: """Store a revocation registry definition.""" options = options or {} identifier = result.job_id or result.rev_reg_def_id @@ -259,7 +259,7 @@ async def store_revocation_registry_definition( async def finish_revocation_registry_definition( self, job_id: str, rev_reg_def_id: str, options: Optional[dict] = None - ): + ) -> None: """Mark a rev reg def as finished.""" options = options or {} async with self.profile.transaction() as txn: @@ -333,7 +333,7 @@ async def get_created_revocation_registry_definition( return None - async def set_active_registry(self, rev_reg_def_id: str): + async def set_active_registry(self, rev_reg_def_id: str) -> None: """Mark a registry as active.""" async with self.profile.transaction() as txn: entry = await txn.handle.fetch( @@ -391,7 +391,7 @@ async def set_active_registry(self, rev_reg_def_id: str): async def create_and_register_revocation_list( self, rev_reg_def_id: str, options: Optional[dict] = None - ): + ) -> RevListResult: """Create and register a revocation list.""" options = options or {} try: @@ -460,7 +460,7 @@ async def create_and_register_revocation_list( return result - async def store_revocation_registry_list(self, result: RevListResult): + async def store_revocation_registry_list(self, result: RevListResult) -> None: """Store a revocation registry list.""" identifier = 
result.job_id or result.rev_reg_def_id @@ -502,7 +502,7 @@ async def store_revocation_registry_list(self, result: RevListResult): async def finish_revocation_list( self, job_id: str, rev_reg_def_id: str, revoked: list - ): + ) -> None: """Mark a revocation list as finished.""" async with self.profile.transaction() as txn: # Finish the registration if the list is new, otherwise already updated @@ -529,7 +529,7 @@ async def update_revocation_list( curr: RevList, revoked: Sequence[int], options: Optional[dict] = None, - ): + ) -> RevListResult: """Publish and update to a revocation list.""" options = options or {} try: @@ -667,12 +667,12 @@ async def retrieve_tails(self, rev_reg_def: RevRegDef) -> str: return str(tails_file_path) - def _check_url(self, url) -> None: + def _check_url(self, url: str) -> None: parsed = urlparse(url) if not (parsed.scheme and parsed.netloc and parsed.path): raise AnonCredsRevocationError("URI {} is not a valid URL".format(url)) - def generate_public_tails_uri(self, rev_reg_def: RevRegDef): + def generate_public_tails_uri(self, rev_reg_def: RevRegDef) -> str: """Construct tails uri from rev_reg_def.""" tails_base_url = self.profile.settings.get("tails_server_base_url") if not tails_base_url: @@ -690,7 +690,7 @@ def get_local_tails_path(self, rev_reg_def: RevRegDef) -> str: tails_dir = indy_client_dir("tails", create=False) return os.path.join(tails_dir, rev_reg_def.value.tails_hash) - async def upload_tails_file(self, rev_reg_def: RevRegDef): + async def upload_tails_file(self, rev_reg_def: RevRegDef) -> None: """Upload the local tails file to the tails server.""" tails_server = AnonCredsTailsServer() @@ -730,7 +730,7 @@ async def get_or_fetch_local_tails_path(self, rev_reg_def: RevRegDef) -> str: # Registry Management - async def handle_full_registry(self, rev_reg_def_id: str): + async def handle_full_registry(self, rev_reg_def_id: str) -> None: """Update the registry status and start the next registry generation.""" async with 
self.profile.session() as session: active_rev_reg_def = await session.handle.fetch( @@ -791,7 +791,7 @@ async def handle_full_registry(self, rev_reg_def_id: str): LOGGER.info(f"Current rev_reg_def_id = {backup_rev_reg_def_id}") LOGGER.info(f"Backup reg = {backup_reg.rev_reg_def_id}") - async def decommission_registry(self, cred_def_id: str): + async def decommission_registry(self, cred_def_id: str) -> list: """Decommission post-init registries and start the next registry generation.""" active_reg = await self.get_or_create_active_registry(cred_def_id) @@ -1449,7 +1449,7 @@ async def revoke_pending_credentials( ) return result - async def mark_pending_revocations(self, rev_reg_def_id: str, *crids: int): + async def mark_pending_revocations(self, rev_reg_def_id: str, *crids: int) -> None: """Cred rev ids stored to publish later.""" async with self.profile.transaction() as txn: entry = await txn.handle.fetch( @@ -1495,7 +1495,7 @@ async def clear_pending_revocations( txn: ProfileSession, rev_reg_def_id: str, crid_mask: Optional[Sequence[int]] = None, - ): + ) -> None: """Clear pending revocations.""" if not isinstance(txn, AskarAnonCredsProfileSession): raise ValueError("Askar wallet required") @@ -1526,10 +1526,12 @@ async def clear_pending_revocations( tags=tags, ) - async def set_tails_file_public_uri(self, rev_reg_id, tails_public_uri): + async def set_tails_file_public_uri(self, rev_reg_id: str, tails_public_uri: str): """Update Revocation Registry tails file public uri.""" + # TODO: Implement or remove pass - async def set_rev_reg_state(self, rev_reg_id, state): + async def set_rev_reg_state(self, rev_reg_id: str, state: str): """Update Revocation Registry state.""" + # TODO: Implement or remove pass diff --git a/acapy_agent/anoncreds/revocation_setup.py b/acapy_agent/anoncreds/revocation_setup.py index 8714858a11..a94220eb2c 100644 --- a/acapy_agent/anoncreds/revocation_setup.py +++ b/acapy_agent/anoncreds/revocation_setup.py @@ -58,13 +58,13 @@ class 
DefaultRevocationSetup(AnonCredsRevocationSetupManager): def __init__(self): """Init manager.""" - def register_events(self, event_bus: EventBus): + def register_events(self, event_bus: EventBus) -> None: """Register event listeners.""" event_bus.subscribe(CRED_DEF_FINISHED_PATTERN, self.on_cred_def) event_bus.subscribe(REV_REG_DEF_FINISHED_PATTERN, self.on_rev_reg_def) event_bus.subscribe(REV_LIST_FINISHED_PATTERN, self.on_rev_list) - async def on_cred_def(self, profile: Profile, event: CredDefFinishedEvent): + async def on_cred_def(self, profile: Profile, event: CredDefFinishedEvent) -> None: """Handle cred def finished.""" payload = event.payload @@ -80,7 +80,9 @@ async def on_cred_def(self, profile: Profile, event: CredDefFinishedEvent): options=payload.options, ) - async def on_rev_reg_def(self, profile: Profile, event: RevRegDefFinishedEvent): + async def on_rev_reg_def( + self, profile: Profile, event: RevRegDefFinishedEvent + ) -> None: """Handle rev reg def finished.""" payload = event.payload @@ -110,7 +112,7 @@ async def on_rev_reg_def(self, profile: Profile, event: RevRegDefFinishedEvent): # Mark the first registry as active await revoc.set_active_registry(payload.rev_reg_def_id) - async def on_rev_list(self, profile: Profile, event: RevListFinishedEvent): + async def on_rev_list(self, profile: Profile, event: RevListFinishedEvent) -> None: """Handle rev list finished.""" await notify_revocation_published_event( profile, event.payload.rev_reg_id, event.payload.revoked diff --git a/acapy_agent/anoncreds/routes.py b/acapy_agent/anoncreds/routes.py index 5739679d50..f60631af7d 100644 --- a/acapy_agent/anoncreds/routes.py +++ b/acapy_agent/anoncreds/routes.py @@ -791,14 +791,14 @@ async def set_active_registry(request: web.BaseRequest): raise web.HTTPInternalServerError(reason=str(e)) from e -def register_events(event_bus: EventBus): +def register_events(event_bus: EventBus) -> None: """Register events.""" # TODO Make this pluggable? 
setup_manager = DefaultRevocationSetup() setup_manager.register_events(event_bus) -async def register(app: web.Application): +async def register(app: web.Application) -> None: """Register routes.""" app.add_routes( @@ -825,7 +825,7 @@ async def register(app: web.Application): ) -def post_process_routes(app: web.Application): +def post_process_routes(app: web.Application) -> None: """Amend swagger API.""" # Add top-level tags description diff --git a/acapy_agent/anoncreds/util.py b/acapy_agent/anoncreds/util.py index f1f8c662c1..1557f54663 100644 --- a/acapy_agent/anoncreds/util.py +++ b/acapy_agent/anoncreds/util.py @@ -45,7 +45,7 @@ def indy_client_dir(subpath: Optional[str] = None, create: bool = False) -> str: return target_dir -def handle_value_error(e: ValueError): +def handle_value_error(e: ValueError) -> None: """Handle ValueError message as web response type.""" if ANONCREDS_PROFILE_REQUIRED_MSG in str(e): raise web.HTTPForbidden(reason=str(e)) from e diff --git a/acapy_agent/anoncreds/verifier.py b/acapy_agent/anoncreds/verifier.py index a2f454c43b..7521f69925 100644 --- a/acapy_agent/anoncreds/verifier.py +++ b/acapy_agent/anoncreds/verifier.py @@ -428,12 +428,12 @@ async def process_pres_identifiers( async def verify_presentation( self, - pres_req, - pres, - schemas, - credential_definitions, - rev_reg_defs, - rev_lists, + pres_req: dict, + pres: dict, + schemas: dict, + credential_definitions: dict, + rev_reg_defs: dict, + rev_lists: dict, ) -> Tuple[bool, list]: """Verify a presentation. @@ -490,7 +490,7 @@ async def verify_presentation( return (verified, msgs) async def verify_presentation_w3c( - self, pres_req, pres, cred_metadata + self, pres_req: dict, pres: dict, cred_metadata: list ) -> PresentationVerificationResult: """Verify a W3C presentation. 
diff --git a/acapy_agent/vc/vc_di/prove.py b/acapy_agent/vc/vc_di/prove.py index acba4359fe..0180908724 100644 --- a/acapy_agent/vc/vc_di/prove.py +++ b/acapy_agent/vc/vc_di/prove.py @@ -166,11 +166,11 @@ async def create_rev_states( async def prepare_data_for_presentation( - presentation, - w3c_creds, - pres_definition, - profile, - challenge, + presentation: dict, + w3c_creds: list, + pres_definition: dict, + profile: Profile, + challenge: str, ) -> tuple[dict[str, Any], list, list]: """prepare_data_for_presentation. From 070662926eec20c409050a725f76a1df4a722259 Mon Sep 17 00:00:00 2001 From: Mourits de Beer <31511766+ff137@users.noreply.github.com> Date: Wed, 16 Apr 2025 19:29:01 +0200 Subject: [PATCH 11/32] :art: Add type hints to `messaging/jsonld` (#3650) Signed-off-by: ff137 --- .../messaging/jsonld/create_verify_data.py | 27 +++++++++++-------- acapy_agent/messaging/jsonld/credential.py | 27 +++++++++++-------- 2 files changed, 32 insertions(+), 22 deletions(-) diff --git a/acapy_agent/messaging/jsonld/create_verify_data.py b/acapy_agent/messaging/jsonld/create_verify_data.py index 1ec1ef7502..2eac10c2bb 100644 --- a/acapy_agent/messaging/jsonld/create_verify_data.py +++ b/acapy_agent/messaging/jsonld/create_verify_data.py @@ -9,6 +9,7 @@ from pyld import jsonld +from ...vc.ld_proofs import DocumentLoader from .error import ( DroppedAttributeError, MissingVerificationMethodError, @@ -16,7 +17,7 @@ ) -def _canonize(data, document_loader=None): +def _canonize(data: dict, document_loader: DocumentLoader | None = None) -> dict: return jsonld.normalize( data, { @@ -27,32 +28,36 @@ def _canonize(data, document_loader=None): ) -def _sha256(data): +def _sha256(data: str) -> str: return hashlib.sha256(data.encode("utf-8")).hexdigest() -def _canonize_signature_options(signatureOptions, document_loader=None): - _signatureOptions = {**signatureOptions, "@context": "https://w3id.org/security/v2"} - _signatureOptions.pop("jws", None) - 
_signatureOptions.pop("signatureValue", None) - _signatureOptions.pop("proofValue", None) - return _canonize(_signatureOptions, document_loader) +def _canonize_signature_options( + signature_options: dict, document_loader: DocumentLoader | None = None +) -> dict: + _signature_options = {**signature_options, "@context": "https://w3id.org/security/v2"} + _signature_options.pop("jws", None) + _signature_options.pop("signatureValue", None) + _signature_options.pop("proofValue", None) + return _canonize(_signature_options, document_loader) -def _canonize_document(doc, document_loader=None): +def _canonize_document(doc: dict, document_loader: DocumentLoader | None = None) -> dict: _doc = {**doc} _doc.pop("proof", None) return _canonize(_doc, document_loader) -def _created_at(): +def _created_at() -> str: """Creation Timestamp.""" stamp = datetime.datetime.now(datetime.timezone.utc) return stamp.strftime("%Y-%m-%dT%H:%M:%SZ") -def create_verify_data(data, signature_options, document_loader=None): +def create_verify_data( + data: dict, signature_options: dict, document_loader: DocumentLoader | None = None +) -> tuple[dict, str]: """Encapsulate process of constructing string used during sign and verify.""" signature_options["type"] = signature_options.get("type", "Ed25519Signature2018") diff --git a/acapy_agent/messaging/jsonld/credential.py b/acapy_agent/messaging/jsonld/credential.py index 04151fbe95..995c4366b0 100644 --- a/acapy_agent/messaging/jsonld/credential.py +++ b/acapy_agent/messaging/jsonld/credential.py @@ -2,6 +2,7 @@ import json +from ...core.profile import ProfileSession from ...did.did_key import DIDKey from ...vc.ld_proofs import DocumentLoader from ...wallet.base import BaseWallet @@ -20,22 +21,22 @@ def did_key(verkey: str) -> str: return DIDKey.from_public_key_b58(verkey, ED25519).did -def b64encode(str): +def b64encode(val: str) -> str: """Url Safe B64 Encode.""" - return str_to_b64(str, urlsafe=True, pad=False) + return str_to_b64(val, urlsafe=True, 
pad=False) -def b64decode(bytes): +def b64decode(val: str) -> str: """Url Safe B64 Decode.""" - return b64_to_str(bytes, urlsafe=True) + return b64_to_str(val, urlsafe=True) -def create_jws(encoded_header, verify_data): +def create_jws(encoded_header: str, verify_data: bytes) -> bytes: """Compose JWS.""" return (encoded_header + ".").encode("utf-8") + verify_data -async def jws_sign(session, verify_data, verkey): +async def jws_sign(session: ProfileSession, verify_data: bytes, verkey: str) -> str: """Sign JWS.""" header = {"alg": "EdDSA", "b64": False, "crit": ["b64"]} @@ -52,14 +53,16 @@ async def jws_sign(session, verify_data, verkey): return encoded_header + ".." + encoded_signature -def verify_jws_header(header): +def verify_jws_header(header: dict) -> None: """Check header requirements.""" if header != {"alg": "EdDSA", "b64": False, "crit": ["b64"]}: raise BadJWSHeaderError("Invalid JWS header parameters for Ed25519Signature2018.") -async def jws_verify(session, verify_data, signature, public_key): +async def jws_verify( + session: ProfileSession, verify_data: bytes, signature: str, public_key: str +) -> bool: """Detached jws verify handling.""" encoded_header, _, encoded_signature = signature.partition("..") @@ -79,11 +82,13 @@ async def jws_verify(session, verify_data, signature, public_key): return verified -async def sign_credential(session, credential, signature_options, verkey): +async def sign_credential( + session: ProfileSession, credential: dict, signature_options: dict, verkey: str +) -> dict: """Sign Credential.""" document_loader = session.profile.inject_or(DocumentLoader) - framed, verify_data_hex_string = create_verify_data( + _, verify_data_hex_string = create_verify_data( credential, signature_options, document_loader, @@ -93,7 +98,7 @@ async def sign_credential(session, credential, signature_options, verkey): return {**credential, "proof": {**signature_options, "jws": jws}} -async def verify_credential(session, doc, verkey): +async def 
verify_credential(session: ProfileSession, doc: dict, verkey: str) -> bool: """Verify credential.""" document_loader = session.profile.inject_or(DocumentLoader) From 25413e007c67d6386a5f64d630d899861f9136cc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Apr 2025 20:48:42 +0000 Subject: [PATCH 12/32] chore(deps): Bump markdown from 3.7 to 3.8 (#3644) Bumps [markdown](https://github.com/Python-Markdown/markdown) from 3.7 to 3.8. - [Release notes](https://github.com/Python-Markdown/markdown/releases) - [Changelog](https://github.com/Python-Markdown/markdown/blob/master/docs/changelog.md) - [Commits](https://github.com/Python-Markdown/markdown/compare/3.7...3.8) --- updated-dependencies: - dependency-name: markdown dependency-version: '3.8' dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: jamshale <31809382+jamshale@users.noreply.github.com> --- poetry.lock | 16 +++++++++------- pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4d376660ad..17323f1462 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
[[package]] name = "aiohappyeyeballs" @@ -1309,6 +1309,8 @@ python-versions = "*" groups = ["main"] files = [ {file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"}, + {file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"}, + {file = "jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6"}, ] [package.dependencies] @@ -1487,18 +1489,18 @@ source = ["Cython (>=3.0.11,<3.1.0)"] [[package]] name = "markdown" -version = "3.7" +version = "3.8" description = "Python implementation of John Gruber's Markdown." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, - {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, + {file = "markdown-3.8-py3-none-any.whl", hash = "sha256:794a929b79c5af141ef5ab0f2f642d0f7b1872981250230e72682346f7cc90dc"}, + {file = "markdown-3.8.tar.gz", hash = "sha256:7df81e63f0df5c4b24b7d156eb81e4690595239b7d70937d0409f1b0de319c6f"}, ] [package.extras] -docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +docs = ["mdx_gh_links (>=0.2)", "mkdocs (>=1.6)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] [[package]] @@ -2991,4 +2993,4 @@ didcommv2 = ["didcomm-messaging"] [metadata] lock-version = "2.1" python-versions = "^3.12" -content-hash = "08cd1d128c971c745a84bc0cb1c9698b80a37e5f444fe7c2d7e301a14dbaabaa" +content-hash = "80c84ae688f9f93ff8fe6c3db7f9b68ee7d268bffc7b9827d9f2f2c2ce7c98eb" diff --git a/pyproject.toml 
b/pyproject.toml index ffc7a6f9b2..7c3aab5fdd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,7 @@ ConfigArgParse = "~1.7" deepmerge = "^2.0" ecdsa = "~0.19.0" jsonpath-ng = "^1.7.0" -Markdown = "~3.7" +Markdown = ">=3.7,<3.9" markupsafe = "^3.0.2" marshmallow = "~3.26.1" nest_asyncio = "~1.6.0" From d5b6de673393654e6abc96bdd77f29b3e113c4eb Mon Sep 17 00:00:00 2001 From: Mourits de Beer <31511766+ff137@users.noreply.github.com> Date: Fri, 18 Apr 2025 21:20:32 +0200 Subject: [PATCH 13/32] :art: Make ledger config more readable (#3664) * :art: Make ledger config more readable Signed-off-by: ff137 * :art: Fix `write_ledger_set` checking all configs Signed-off-by: ff137 * :white_check_mark: Fix expected error message `No writable ledger configured` instead of `No is_write ledger set` Signed-off-by: ff137 * :art: Add logging to ledger arg parse Signed-off-by: ff137 * :art: Clarify single_configured, multi_configured Signed-off-by: ff137 * :art: Signed-off-by: ff137 * :art: Reduce duplication and complexity Signed-off-by: ff137 * :art: Add logging to `get_write_ledger_config_for_profile`, and simplify key reading Signed-off-by: ff137 * :art: Signed-off-by: ff137 * :art: Reduce complexity in binding multi-ledger provider Signed-off-by: ff137 --------- Signed-off-by: ff137 --- acapy_agent/askar/profile.py | 45 +++++---- acapy_agent/askar/profile_anon.py | 42 ++++---- acapy_agent/config/argparse.py | 55 +++++++--- acapy_agent/config/ledger.py | 112 +++++++++++---------- acapy_agent/config/tests/test_ledger.py | 2 +- acapy_agent/revocation/routes.py | 14 ++- acapy_agent/revocation_anoncreds/routes.py | 14 +-- acapy_agent/utils/multi_ledger.py | 57 ++++++----- 8 files changed, 194 insertions(+), 147 deletions(-) diff --git a/acapy_agent/askar/profile.py b/acapy_agent/askar/profile.py index 4afcea0d60..c7912436a0 100644 --- a/acapy_agent/askar/profile.py +++ b/acapy_agent/askar/profile.py @@ -128,33 +128,37 @@ def bind_providers(self): 
ClassProvider.Inject(Profile), ), ) - if ( - self.settings.get("ledger.ledger_config_list") - and len(self.settings.get("ledger.ledger_config_list")) >= 1 - ): + + ledger_config_list = self.settings.get("ledger.ledger_config_list") + if ledger_config_list: write_ledger_config = get_write_ledger_config_for_profile( settings=self.settings ) cache = self.context.injector.inject_or(BaseCache) + + pool_name = write_ledger_config.get("pool_name") + pool_id = write_ledger_config.get("id") + ledger_name = pool_name or pool_id + keepalive = write_ledger_config.get("keepalive") + read_only = write_ledger_config.get("read_only") + socks_proxy = write_ledger_config.get("socks_proxy") + genesis_transactions = write_ledger_config.get("genesis_transactions") + + ledger_pool = IndyVdrLedgerPool( + name=ledger_name, + keepalive=keepalive, + cache=cache, + genesis_transactions=genesis_transactions, + read_only=read_only, + socks_proxy=socks_proxy, + ) + injector.bind_provider( BaseLedger, - ClassProvider( - IndyVdrLedger, - IndyVdrLedgerPool( - write_ledger_config.get("pool_name") - or write_ledger_config.get("id"), - keepalive=write_ledger_config.get("keepalive"), - cache=cache, - genesis_transactions=write_ledger_config.get( - "genesis_transactions" - ), - read_only=write_ledger_config.get("read_only"), - socks_proxy=write_ledger_config.get("socks_proxy"), - ), - ref(self), - ), + ClassProvider(IndyVdrLedger, ledger_pool, ref(self)), ) - self.settings["ledger.write_ledger"] = write_ledger_config.get("id") + self.settings["ledger.write_ledger"] = pool_id + if ( "endorser_alias" in write_ledger_config and "endorser_did" in write_ledger_config @@ -169,6 +173,7 @@ def bind_providers(self): injector.bind_provider( BaseLedger, ClassProvider(IndyVdrLedger, self.ledger_pool, ref(self)) ) + if self.ledger_pool or self.settings.get("ledger.ledger_config_list"): injector.bind_provider( IndyVerifier, diff --git a/acapy_agent/askar/profile_anon.py b/acapy_agent/askar/profile_anon.py index 
b684dc952f..1551472f51 100644 --- a/acapy_agent/askar/profile_anon.py +++ b/acapy_agent/askar/profile_anon.py @@ -117,33 +117,35 @@ def bind_providers(self): IndyIssuer, ClassProvider("acapy_agent.indy.credx.issuer.IndyCredxIssuer", ref(self)), ) - if ( - self.settings.get("ledger.ledger_config_list") - and len(self.settings.get("ledger.ledger_config_list")) >= 1 - ): + + ledger_config_list = self.settings.get("ledger.ledger_config_list") + if ledger_config_list: write_ledger_config = get_write_ledger_config_for_profile( settings=self.settings ) cache = self.context.injector.inject_or(BaseCache) + + pool_name = write_ledger_config.get("pool_name") + pool_id = write_ledger_config.get("id") + ledger_name = pool_name or pool_id + keepalive = write_ledger_config.get("keepalive") + read_only = write_ledger_config.get("read_only") + socks_proxy = write_ledger_config.get("socks_proxy") + genesis_transactions = write_ledger_config.get("genesis_transactions") + + ledger_pool = IndyVdrLedgerPool( + name=ledger_name, + keepalive=keepalive, + cache=cache, + genesis_transactions=genesis_transactions, + read_only=read_only, + socks_proxy=socks_proxy, + ) injector.bind_provider( BaseLedger, - ClassProvider( - IndyVdrLedger, - IndyVdrLedgerPool( - write_ledger_config.get("pool_name") - or write_ledger_config.get("id"), - keepalive=write_ledger_config.get("keepalive"), - cache=cache, - genesis_transactions=write_ledger_config.get( - "genesis_transactions" - ), - read_only=write_ledger_config.get("read_only"), - socks_proxy=write_ledger_config.get("socks_proxy"), - ), - ref(self), - ), + ClassProvider(IndyVdrLedger, ledger_pool, ref(self)), ) - self.settings["ledger.write_ledger"] = write_ledger_config.get("id") + self.settings["ledger.write_ledger"] = pool_id if ( "endorser_alias" in write_ledger_config and "endorser_did" in write_ledger_config diff --git a/acapy_agent/config/argparse.py b/acapy_agent/config/argparse.py index 86f5b9cb1c..e6d898704e 100644 --- 
a/acapy_agent/config/argparse.py +++ b/acapy_agent/config/argparse.py @@ -2,6 +2,7 @@ import abc import json +import logging from functools import reduce from itertools import chain from os import environ @@ -16,6 +17,8 @@ from .plugin_settings import PLUGIN_CONFIG_KEY from .util import BoundedInt, ByteSize +LOGGER = logging.getLogger(__name__) + CAT_PROVISION = "general" CAT_START = "start" CAT_UPGRADE = "upgrade" @@ -907,56 +910,80 @@ def get_settings(self, args: Namespace) -> dict: if args.no_ledger: settings["ledger.disabled"] = True else: - single_configured = False - multi_configured = False update_pool_name = False write_ledger_specified = False if args.read_only_ledger: + LOGGER.debug("Setting read-only ledger") settings["read_only_ledger"] = True + + single_configured = True if args.genesis_url: + LOGGER.debug("Setting ledger.genesis_url = %s", args.genesis_url) settings["ledger.genesis_url"] = args.genesis_url - single_configured = True elif args.genesis_file: + LOGGER.debug("Setting ledger.genesis_file = %s", args.genesis_file) settings["ledger.genesis_file"] = args.genesis_file - single_configured = True elif args.genesis_transactions: + LOGGER.debug("Setting ledger.genesis_transactions") settings["ledger.genesis_transactions"] = args.genesis_transactions - single_configured = True + else: + LOGGER.debug("No genesis url, file, or transactions provided") + single_configured = False + + multi_configured = False if args.genesis_transactions_list: + LOGGER.debug("Processing genesis_transactions_list") with open(args.genesis_transactions_list, "r") as stream: + # Load YAML configuration for multiple ledgers txn_config_list = yaml.safe_load(stream) ledger_config_list = [] + + # Process each ledger configuration for txn_config in txn_config_list: - if "is_write" in txn_config and txn_config["is_write"]: + # Check if this is a write ledger + if txn_config.get("is_write", False): write_ledger_specified = True - if ( - "genesis_url" not in txn_config - and 
"genesis_file" not in txn_config - and "genesis_transactions" not in txn_config - ): + + # Ensure genesis information is provided + has_genesis_info = ( + "genesis_url" in txn_config + or "genesis_file" in txn_config + or "genesis_transactions" in txn_config + ) + if not has_genesis_info: raise ArgsParseError( "No genesis information provided for write ledger" ) + + # Use ID as pool_name if pool_name not specified if "id" in txn_config and "pool_name" not in txn_config: txn_config["pool_name"] = txn_config["id"] + update_pool_name = True ledger_config_list.append(txn_config) + + # Ensure write ledger is specified unless in read-only mode if not write_ledger_specified and not args.read_only_ledger: raise ArgsParseError( "No write ledger genesis provided in multi-ledger config" ) + + LOGGER.debug("Setting ledger.ledger_config_list") settings["ledger.ledger_config_list"] = ledger_config_list multi_configured = True + if not (single_configured or multi_configured): raise ArgsParseError( - "One of --genesis-url --genesis-file, --genesis-transactions " + "One of --genesis-url, --genesis-file, --genesis-transactions, " "or --genesis-transactions-list must be specified (unless " - "--no-ledger is specified to explicitly configure aca-py to" - " run with no ledger)." + "--no-ledger is specified to explicitly configure aca-py to " + "run with no ledger)." 
) + if single_configured and multi_configured: raise ArgsParseError("Cannot configure both single- and multi-ledger.") + if args.ledger_pool_name and not update_pool_name: settings["ledger.pool_name"] = args.ledger_pool_name if args.ledger_keepalive: diff --git a/acapy_agent/config/ledger.py b/acapy_agent/config/ledger.py index e177d37fc8..563ea2e638 100644 --- a/acapy_agent/config/ledger.py +++ b/acapy_agent/config/ledger.py @@ -37,6 +37,26 @@ async def fetch_genesis_transactions(genesis_url: str) -> str: raise ConfigError("Error retrieving ledger genesis transactions") from e +async def fetch_genesis_from_url_or_file( + genesis_url: Optional[str], genesis_path: Optional[str] +) -> str: + """Fetch genesis transactions from URL or file.""" + txns = "" + if genesis_url: + txns = await fetch_genesis_transactions(genesis_url) + elif genesis_path: + try: + LOGGER.info("Reading ledger genesis transactions from: %s", genesis_path) + with open(genesis_path, "r") as genesis_file: + txns = genesis_file.read() + except IOError as e: + LOGGER.error("Failed to read genesis file: %s", str(e)) + raise ConfigError("Error reading ledger genesis transactions") from e + else: + LOGGER.warning("No genesis url or path found in settings") + return txns + + async def get_genesis_transactions(settings: Settings) -> str: """Fetch genesis transactions if necessary.""" @@ -45,62 +65,41 @@ async def get_genesis_transactions(settings: Settings) -> str: LOGGER.debug("Genesis transactions from settings: %s", "found" if txns else "absent") if not txns: LOGGER.debug("No genesis transactions found in settings") - if settings.get("ledger.genesis_url"): - txns = await fetch_genesis_transactions(settings["ledger.genesis_url"]) - elif settings.get("ledger.genesis_file"): - try: - genesis_path = settings["ledger.genesis_file"] - LOGGER.info("Reading ledger genesis transactions from: %s", genesis_path) - with open(genesis_path, "r") as genesis_file: - txns = genesis_file.read() - except IOError as e: - 
LOGGER.error("Failed to read genesis file: %s", str(e)) - raise ConfigError("Error reading ledger genesis transactions") from e + genesis_url = settings.get("ledger.genesis_url") + genesis_path = settings.get("ledger.genesis_file") + + txns = await fetch_genesis_from_url_or_file(genesis_url, genesis_path) if txns: LOGGER.debug("Storing genesis transactions in settings") settings["ledger.genesis_transactions"] = txns + return txns -async def load_multiple_genesis_transactions_from_config(settings: Settings): +async def load_multiple_genesis_transactions_from_config(settings: Settings) -> None: """Fetch genesis transactions for multiple ledger configuration.""" ledger_config_list = settings.get("ledger.ledger_config_list") ledger_txns_list = [] write_ledger_set = False - LOGGER.debug("Processing %d ledger configs", len(ledger_config_list)) + LOGGER.debug("Processing %d ledger configs", len(ledger_config_list)) for config in ledger_config_list: - txns = None - if "genesis_transactions" in config: - txns = config.get("genesis_transactions") + txns = config.get("genesis_transactions") + if not txns: - if "genesis_url" in config: - txns = await fetch_genesis_transactions(config.get("genesis_url")) - elif "genesis_file" in config: - try: - genesis_path = config.get("genesis_file") - LOGGER.info( - "Reading ledger genesis transactions from file: %s", genesis_path - ) - with open(genesis_path, "r") as genesis_file: - txns = genesis_file.read() - except IOError as e: - LOGGER.error("Failed to read genesis file: %s", str(e)) - raise ConfigError("Error reading ledger genesis transactions") from e - is_write_ledger = ( - False if config.get("is_write") is None else config.get("is_write") - ) - ledger_id = config.get("id") or str(uuid4()) + genesis_url = config.get("genesis_url") + genesis_path = config.get("genesis_file") + txns = await fetch_genesis_from_url_or_file(genesis_url, genesis_path) + + is_write_ledger = config.get("is_write", False) if is_write_ledger: 
write_ledger_set = True + + ledger_id = config.get("id", str(uuid4())) # Default to UUID if no ID provided config_item = { "id": ledger_id, - "is_production": ( - True - if config.get("is_production") is None - else config.get("is_production") - ), + "is_production": config.get("is_production", True), "is_write": is_write_ledger, "genesis_transactions": txns, "keepalive": int(config.get("keepalive", 5)), @@ -112,20 +111,25 @@ async def load_multiple_genesis_transactions_from_config(settings: Settings): config_item["endorser_alias"] = config.get("endorser_alias") if "endorser_did" in config: config_item["endorser_did"] = config.get("endorser_did") + ledger_txns_list.append(config_item) - if ( - not write_ledger_set - and not settings.get("ledger.read_only") - and not ( - settings.get("ledger.genesis_transactions") - or settings.get("ledger.genesis_file") - or settings.get("ledger.genesis_url") - ) - ): + + # Check if we have a writable ledger or genesis information + is_read_only = settings.get("ledger.read_only") + has_genesis_info = ( + settings.get("ledger.genesis_transactions") + or settings.get("ledger.genesis_file") + or settings.get("ledger.genesis_url") + ) + + # Raise error if we have neither a writable ledger nor genesis info (unless read-only) + if not write_ledger_set and not is_read_only and not has_genesis_info: raise ConfigError( - "No is_write ledger set and no genesis_url," - " genesis_file and genesis_transactions provided." + "No writable ledger configured and no genesis information provided. " + "Please set is_write=True for a ledger or provide genesis_url, " + "genesis_file, or genesis_transactions." 
) + settings["ledger.ledger_config_list"] = ledger_txns_list LOGGER.debug("Processed %d ledger configs successfully", len(ledger_txns_list)) @@ -154,10 +158,8 @@ async def ledger_config( if taa_info["taa_required"] and public_did: LOGGER.debug("TAA acceptance required") taa_accepted = await ledger.get_latest_txn_author_acceptance() - if ( - not taa_accepted - or taa_info["taa_record"]["digest"] != taa_accepted["digest"] - ): + digest_match = taa_info["taa_record"]["digest"] == taa_accepted["digest"] + if not taa_accepted or not digest_match: LOGGER.info("TAA acceptance needed - performing acceptance") if not await accept_taa(ledger, profile, taa_info, provision): LOGGER.warning("TAA acceptance failed") @@ -290,10 +292,10 @@ async def accept_taa( ) if taa_acceptance_mechanism not in mechanisms: + valid_mechanisms = ", ".join(mechanisms.keys()) raise LedgerError( f"TAA acceptance mechanism '{taa_acceptance_mechanism}' is not a " - "valid acceptance mechanism. Valid mechanisms are: " - + str(list(mechanisms.keys())) + f"valid acceptance mechanism. 
Valid mechanisms are: {valid_mechanisms}" ) mechanism = taa_acceptance_mechanism diff --git a/acapy_agent/config/tests/test_ledger.py b/acapy_agent/config/tests/test_ledger.py index 3fb5af3f49..1cd490575a 100644 --- a/acapy_agent/config/tests/test_ledger.py +++ b/acapy_agent/config/tests/test_ledger.py @@ -498,7 +498,7 @@ async def test_load_multiple_genesis_transactions_config_error_a(self): ) with self.assertRaises(test_module.ConfigError) as cm: await test_module.load_multiple_genesis_transactions_from_config(settings) - assert "No is_write ledger set" in str(cm.exception) + assert "No writable ledger configured" in str(cm.exception) async def test_load_multiple_genesis_transactions_multiple_write(self): TEST_GENESIS_TXNS = { diff --git a/acapy_agent/revocation/routes.py b/acapy_agent/revocation/routes.py index 7bac4b06a6..1f1aa96b27 100644 --- a/acapy_agent/revocation/routes.py +++ b/acapy_agent/revocation/routes.py @@ -1007,17 +1007,13 @@ async def update_rev_reg_revoked_state(request: web.BaseRequest): is_anoncreds_profile_raise_web_exception(profile) rev_reg_id = request.match_info["rev_reg_id"] - apply_ledger_update = json.loads(request.query.get("apply_ledger_update", "false")) LOGGER.debug( - f"/revocation/registry/{rev_reg_id}/fix-revocation-entry-state request = {apply_ledger_update}" # noqa: E501 + "Update revocation state request for rev_reg_id = %s, apply_ledger_update = %s", + rev_reg_id, + apply_ledger_update, ) - def _log_ledger_info(available_write_ledgers, write_ledger, pool): - LOGGER.debug(f"available write_ledgers = {available_write_ledgers}") - LOGGER.debug(f"write_ledger = {write_ledger}") - LOGGER.debug(f"write_ledger pool = {pool}") - rev_reg_record = None genesis_transactions = None async with profile.session() as session: @@ -1035,7 +1031,9 @@ def _log_ledger_info(available_write_ledgers, write_ledger, pool): available_write_ledgers = await ledger_manager.get_write_ledgers() pool = write_ledger.pool genesis_transactions = 
pool.genesis_txns - _log_ledger_info(available_write_ledgers, write_ledger, pool) + LOGGER.debug("available write_ledgers = %s", available_write_ledgers) + LOGGER.debug("write_ledger = %s", write_ledger) + LOGGER.debug("write_ledger pool = %s", pool) if not genesis_transactions: raise web.HTTPInternalServerError( diff --git a/acapy_agent/revocation_anoncreds/routes.py b/acapy_agent/revocation_anoncreds/routes.py index 876106b6a0..d57e19b5fc 100644 --- a/acapy_agent/revocation_anoncreds/routes.py +++ b/acapy_agent/revocation_anoncreds/routes.py @@ -856,10 +856,12 @@ async def update_rev_reg_revoked_state(request: web.BaseRequest): is_not_anoncreds_profile_raise_web_exception(profile) rev_reg_id = request.match_info["rev_reg_id"] - - apply_ledger_update_json = request.query.get("apply_ledger_update", "false") - LOGGER.debug(">>> apply_ledger_update_json = %s", apply_ledger_update_json) apply_ledger_update = json.loads(request.query.get("apply_ledger_update", "false")) + LOGGER.debug( + "Update revocation state request for rev_reg_id = %s, apply_ledger_update = %s", + rev_reg_id, + apply_ledger_update, + ) genesis_transactions = None recovery_txn = {} @@ -879,10 +881,10 @@ async def update_rev_reg_revoked_state(request: web.BaseRequest): ledger_manager = context.injector.inject(BaseMultipleLedgerManager) write_ledger = context.injector.inject(BaseLedger) available_write_ledgers = await ledger_manager.get_write_ledgers() - LOGGER.debug(f"available write_ledgers = {available_write_ledgers}") - LOGGER.debug(f"write_ledger = {write_ledger}") + LOGGER.debug("available write_ledgers = %s", available_write_ledgers) + LOGGER.debug("write_ledger = %s", write_ledger) pool = write_ledger.pool - LOGGER.debug(f"write_ledger pool = {pool}") + LOGGER.debug("write_ledger pool = %s", pool) genesis_transactions = pool.genesis_txns diff --git a/acapy_agent/utils/multi_ledger.py b/acapy_agent/utils/multi_ledger.py index acfcc30132..2dad4871ae 100644 --- 
a/acapy_agent/utils/multi_ledger.py +++ b/acapy_agent/utils/multi_ledger.py @@ -1,51 +1,62 @@ """Multiledger related utility methods.""" +import logging from collections import OrderedDict from ..config.settings import BaseSettings from ..core.error import ProfileError +LOGGER = logging.getLogger(__name__) + def get_write_ledger_config_for_profile(settings: BaseSettings) -> dict: """Return initial/default write ledger config on profile creation.""" write_ledger_config = None prod_write_ledger_pool = OrderedDict() non_prod_write_ledger_pool = OrderedDict() + + LOGGER.debug("Getting write ledger config for profile") for ledger_config in settings.get("ledger.ledger_config_list"): - if ledger_config.get("is_production") and ( - ledger_config.get("is_write") or settings.get("ledger.read_only") - ): - prod_write_ledger_pool[ - ledger_config.get("id") or ledger_config.get("pool_name") - ] = ledger_config - elif not ledger_config.get("is_production") and ( - ledger_config.get("is_write") or settings.get("ledger.read_only") - ): - non_prod_write_ledger_pool[ - ledger_config.get("id") or ledger_config.get("pool_name") - ] = ledger_config - if "ledger.write_ledger" in settings: - if settings.get("ledger.write_ledger") in prod_write_ledger_pool: - write_ledger_config = prod_write_ledger_pool.get( - settings.get("ledger.write_ledger") - ) - elif settings.get("ledger.write_ledger") in non_prod_write_ledger_pool: - write_ledger_config = non_prod_write_ledger_pool.get( - settings.get("ledger.write_ledger") + is_production = ledger_config.get("is_production") + is_write = ledger_config.get("is_write") + is_read_only = ledger_config.get("read_only") + ledger_id = ledger_config.get("id") or ledger_config.get("pool_name") + + if is_production and (is_write or is_read_only): + prod_write_ledger_pool[ledger_id] = ledger_config + elif not is_production and (is_write or is_read_only): + non_prod_write_ledger_pool[ledger_id] = ledger_config + else: + LOGGER.warning( + "Ledger config %s is 
not a write ledger nor a read-only ledger", + ledger_id, ) + + write_ledger = settings.get("ledger.write_ledger") + if write_ledger: + if write_ledger in prod_write_ledger_pool: + write_ledger_config = prod_write_ledger_pool.get(write_ledger) + elif write_ledger in non_prod_write_ledger_pool: + write_ledger_config = non_prod_write_ledger_pool.get(write_ledger) else: - raise ProfileError( - "The ledger.write_ledger in profile settings does not correspond to a" - " write configurable ledger provided with --genesis-transactions-list" + error_message = ( + "ledger.write_ledger in profile settings does not correspond to a " + "write configurable ledger provided with --genesis-transactions-list" ) + LOGGER.error(error_message) + raise ProfileError(error_message) else: if len(prod_write_ledger_pool) >= 1: + LOGGER.debug("Using first production write ledger") write_ledger_config = (list(prod_write_ledger_pool.values()))[0] elif len(non_prod_write_ledger_pool) >= 1: + LOGGER.debug("Using first non-production write ledger") write_ledger_config = (list(non_prod_write_ledger_pool.values()))[0] else: + LOGGER.error("No write ledger configuration found in ledger_config_list") raise ProfileError( "No write ledger configuration found in ledger_config_list which " "was provided with --genesis-transactions-list" ) + return write_ledger_config From a11ebce389c123561f3978fc4970d75b52cf910d Mon Sep 17 00:00:00 2001 From: George Mulhearn <57472912+gmulhearn@users.noreply.github.com> Date: Sat, 19 Apr 2025 19:08:09 +1000 Subject: [PATCH 14/32] (fix) VM resolution strategy correction for embedded VMs (#3665) * fix for inline VMs Signed-off-by: George Mulhearn * Signed-off-by: George Mulhearn * retrigger CI Signed-off-by: George Mulhearn --------- Signed-off-by: George Mulhearn Co-authored-by: George Mulhearn --- .../default_verification_key_strategy.py | 5 +++-- .../test_default_verification_key_strategy.py | 20 ++++++++++++++++++- 2 files changed, 22 insertions(+), 3 deletions(-) diff 
--git a/acapy_agent/wallet/default_verification_key_strategy.py b/acapy_agent/wallet/default_verification_key_strategy.py index a07dedfe4c..f408b8f926 100644 --- a/acapy_agent/wallet/default_verification_key_strategy.py +++ b/acapy_agent/wallet/default_verification_key_strategy.py @@ -4,7 +4,7 @@ from abc import ABC, abstractmethod from typing import Literal, Optional -from pydid import DIDDocument +from pydid import DIDDocument, VerificationMethod from ..core.error import BaseError from ..core.profile import Profile @@ -68,6 +68,7 @@ def __init__(self): self.key_types_mapping = { "Ed25519Signature2018": ["Ed25519VerificationKey2018"], "Ed25519Signature2020": ["Ed25519VerificationKey2020", "Multikey"], + "BbsBlsSignature2020": ["Bls12381G2Key2020"], } async def get_verification_method_id_for_did( @@ -109,7 +110,7 @@ async def get_verification_method_id_for_did( methods = [ await resolver.dereference_verification_method(profile, method, document=doc) if isinstance(method, str) - else method + else VerificationMethod.deserialize(method) for method in methods_or_refs ] diff --git a/acapy_agent/wallet/tests/test_default_verification_key_strategy.py b/acapy_agent/wallet/tests/test_default_verification_key_strategy.py index bd96e5d19e..f5f9d75e01 100644 --- a/acapy_agent/wallet/tests/test_default_verification_key_strategy.py +++ b/acapy_agent/wallet/tests/test_default_verification_key_strategy.py @@ -47,7 +47,16 @@ async def asyncSetUp(self) -> None: }, ], "authentication": ["did:example:123#key-1"], - "assertionMethod": ["did:example:123#key-2", "did:example:123#key-3"], + "assertionMethod": [ + "did:example:123#key-2", + "did:example:123#key-3", + { + "id": "did:example:123#key-4", + "type": "Bls12381G2Key2020", + "controller": "did:example:123", + "publicKeyBase58": "25EEkQtcLKsEzQ6JTo9cg4W7NHpaurn4Wg6LaNPFq6JQXnrP91SDviUz7KrJVMJd76CtAZFsRLYzvgX2JGxo2ccUHtuHk7ELCWwrkBDfrXCFVfqJKDootee9iVaF6NpdJtBE", + }, + ], }, ) ) @@ -87,6 +96,15 @@ async def 
test_with_did_for_assertion(self): ) == "did:example:123#key-3" ) + assert ( + await strategy.get_verification_method_id_for_did( + "did:example:123", + self.profile, + proof_type="BbsBlsSignature2020", + proof_purpose="assertionMethod", + ) + == "did:example:123#key-4" + ) async def test_unsupported_did_method(self): strategy = DefaultVerificationKeyStrategy() From 026971ed391e60974098229293b4f45ba9e5a423 Mon Sep 17 00:00:00 2001 From: Mourits de Beer <31511766+ff137@users.noreply.github.com> Date: Sat, 19 Apr 2025 11:47:43 +0200 Subject: [PATCH 15/32] :art: Rename did:indy create/response schema objects (#3663) * :art: Rename did:indy create/response schema objects Signed-off-by: ff137 * :art: Add more detail to options description Signed-off-by: ff137 * :art: Clarify defaults Signed-off-by: ff137 --------- Signed-off-by: ff137 Co-authored-by: Stephen Curran --- acapy_agent/did/indy/indy_manager.py | 12 +++++++----- acapy_agent/did/indy/routes.py | 24 ++++++++++++++---------- 2 files changed, 21 insertions(+), 15 deletions(-) diff --git a/acapy_agent/did/indy/indy_manager.py b/acapy_agent/did/indy/indy_manager.py index c745f51835..26c1c3ef4d 100644 --- a/acapy_agent/did/indy/indy_manager.py +++ b/acapy_agent/did/indy/indy_manager.py @@ -24,16 +24,17 @@ async def _get_holder_defined_did(self, options: dict) -> str | None: async with self.profile.session() as session: did_methods = session.inject(DIDMethods) indy_method = did_methods.from_method(INDY.method_name) + did = options.get("did") - if indy_method.holder_defined_did() and options.get("did"): - return strip_did_prefix(options.get("did")) + if indy_method.holder_defined_did() and did: + return strip_did_prefix(did) return None - async def _get_key_type(self, key_type: str) -> KeyType: + async def _get_key_type(self, key_type: str, default: KeyType = ED25519) -> KeyType: async with self.profile.session() as session: key_types = session.inject(KeyTypes) - return key_types.from_key_type(key_type) or ED25519 + 
return key_types.from_key_type(key_type) or default def _create_key_pair(self, options: dict, key_type: KeyType) -> Key: seed = options.get("seed") @@ -45,8 +46,9 @@ def _create_key_pair(self, options: dict, key_type: KeyType) -> Key: async def register(self, options: dict) -> dict: """Register a DID Indy.""" options = options or {} + key_type = options.get("key_type", "") - key_type = await self._get_key_type(options.get("key_type") or ED25519) + key_type = await self._get_key_type(key_type) did_validation = DIDParametersValidation(self.profile.inject(DIDMethods)) did_validation.validate_key_type(INDY, key_type) diff --git a/acapy_agent/did/indy/routes.py b/acapy_agent/did/indy/routes.py index cae5e0c7ad..0f208b3018 100644 --- a/acapy_agent/did/indy/routes.py +++ b/acapy_agent/did/indy/routes.py @@ -13,13 +13,17 @@ from ...wallet.error import WalletError -class CreateRequestSchema(OpenAPISchema): - """Parameters and validators for create DID endpoint.""" +class CreateDidIndyRequestSchema(OpenAPISchema): + """Parameters and validators for create DID Indy endpoint.""" options = fields.Dict( required=False, metadata={ - "description": "Additional configuration options", + "description": ( + "Additional configuration options. " + "Supported options: did, seed, key_type. " + "Default key_type is ed25519." 
+ ), "example": { "did": "did:indy:WRfXPg8dantKVubE3HX8pw", "seed": "000000000000000000000000Trustee1", @@ -36,8 +40,8 @@ class CreateRequestSchema(OpenAPISchema): ) -class CreateResponseSchema(OpenAPISchema): - """Response schema for create DID endpoint.""" +class CreateDidIndyResponseSchema(OpenAPISchema): + """Response schema for create DID Indy endpoint.""" did = fields.Str( metadata={ @@ -54,17 +58,17 @@ class CreateResponseSchema(OpenAPISchema): @docs(tags=["did"], summary="Create a did:indy") -@request_schema(CreateRequestSchema()) -@response_schema(CreateResponseSchema, HTTPStatus.OK) +@request_schema(CreateDidIndyRequestSchema()) +@response_schema(CreateDidIndyResponseSchema, HTTPStatus.OK) @tenant_authentication async def create_indy_did(request: web.BaseRequest): """Create a INDY DID.""" context: AdminRequestContext = request["context"] body = await request.json() + options = body.get("options", {}) try: - return web.json_response( - (await DidIndyManager(context.profile).register(body.get("options"))), - ) + result = await DidIndyManager(context.profile).register(options) + return web.json_response(result) except WalletError as e: raise web.HTTPBadRequest(reason=str(e)) From 88092d6a3cf53d95d119cb58b0a14cb1215711b8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Apr 2025 10:51:23 -0700 Subject: [PATCH 16/32] chore(deps): Bump packaging from 24.2 to 25.0 (#3667) Bumps [packaging](https://github.com/pypa/packaging) from 24.2 to 25.0. - [Release notes](https://github.com/pypa/packaging/releases) - [Changelog](https://github.com/pypa/packaging/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pypa/packaging/compare/24.2...25.0) --- updated-dependencies: - dependency-name: packaging dependency-version: '25.0' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 17323f1462..7c6dfd89cd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1355,7 +1355,7 @@ files = [ {file = "lxml-5.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aa837e6ee9534de8d63bc4c1249e83882a7ac22bd24523f83fad68e6ffdf41ae"}, {file = "lxml-5.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:da4c9223319400b97a2acdfb10926b807e51b69eb7eb80aad4942c0516934858"}, {file = "lxml-5.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dc0e9bdb3aa4d1de703a437576007d366b54f52c9897cae1a3716bb44fc1fc85"}, - {file = "lxml-5.3.2-cp310-cp310-win32.whl", hash = "sha256:5f94909a1022c8ea12711db7e08752ca7cf83e5b57a87b59e8a583c5f35016ad"}, + {file = "lxml-5.3.2-cp310-cp310-win32.win32.whl", hash = "sha256:dd755a0a78dd0b2c43f972e7b51a43be518ebc130c9f1a7c4480cf08b4385486"}, {file = "lxml-5.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:d64ea1686474074b38da13ae218d9fde0d1dc6525266976808f41ac98d9d7980"}, {file = "lxml-5.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9d61a7d0d208ace43986a92b111e035881c4ed45b1f5b7a270070acae8b0bfb4"}, {file = "lxml-5.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:856dfd7eda0b75c29ac80a31a6411ca12209183e866c33faf46e77ace3ce8a79"}, @@ -1762,14 +1762,14 @@ files = [ [[package]] name = "packaging" -version = "24.2" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "packaging-25.0-py3-none-any.whl", hash = 
"sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] @@ -2993,4 +2993,4 @@ didcommv2 = ["didcomm-messaging"] [metadata] lock-version = "2.1" python-versions = "^3.12" -content-hash = "80c84ae688f9f93ff8fe6c3db7f9b68ee7d268bffc7b9827d9f2f2c2ce7c98eb" +content-hash = "7866e5e48493c7b2f779fec8fcca916a44d48318a022e797b1d23bc19b4c9000" diff --git a/pyproject.toml b/pyproject.toml index 7c3aab5fdd..a4206de299 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ Markdown = ">=3.7,<3.9" markupsafe = "^3.0.2" marshmallow = "~3.26.1" nest_asyncio = "~1.6.0" -packaging = "^24.2" +packaging = ">=24.2,<26.0" portalocker = "^3.1.1" prompt_toolkit = ">=2.0.9,<3.1.0" pydid = "^0.5.1" From a969a8dc06f7afdc860c7d1591dd1c1f9aeb6547 Mon Sep 17 00:00:00 2001 From: StepSecurity Bot Date: Wed, 23 Apr 2025 08:17:14 -0700 Subject: [PATCH 17/32] :pushpin: Pin Actions to a full length commit SHA and image tags to digests (#3668) Signed-off-by: StepSecurity Bot --- .github/workflows/bdd-integration-tests.yml | 4 ++-- .github/workflows/bdd-interop-tests.yml | 6 +++--- .github/workflows/codeql.yml | 6 +++--- .github/workflows/format.yml | 6 +++--- .github/workflows/nightly.yml | 4 ++-- .github/workflows/pip-audit.yml | 4 ++-- .github/workflows/pr-tests.yml | 2 +- .github/workflows/publish-docs.yml | 6 +++--- .github/workflows/publish.yml | 16 ++++++++-------- .github/workflows/pythonpublish.yml | 6 +++--- .github/workflows/scenario-integration-tests.yml | 6 +++--- .github/workflows/scorecard.yml | 8 ++++---- .github/workflows/snyk-lts.yml | 6 +++--- .github/workflows/snyk.yml | 6 +++--- .github/workflows/sonar-merge-main.yml | 4 ++-- .github/workflows/sonar-pr.yml | 12 ++++++------ demo/docker-test/db/Dockerfile | 2 +- demo/elk-stack/extensions/curator/Dockerfile | 2 +- demo/elk-stack/extensions/logspout/Dockerfile | 2 +- 
scenarios/Dockerfile | 2 +- 20 files changed, 55 insertions(+), 55 deletions(-) diff --git a/.github/workflows/bdd-integration-tests.yml b/.github/workflows/bdd-integration-tests.yml index b36dc0544e..207763804a 100644 --- a/.github/workflows/bdd-integration-tests.yml +++ b/.github/workflows/bdd-integration-tests.yml @@ -26,12 +26,12 @@ jobs: is_release: ${{ steps.check_if_release.outputs.is_release }} steps: - name: checkout-acapy - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 0 - name: Check changed files id: check-changed-files - uses: tj-actions/changed-files@v46.0.5 + uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5 with: files_yaml: | src: diff --git a/.github/workflows/bdd-interop-tests.yml b/.github/workflows/bdd-interop-tests.yml index 0554421a9f..f82e2f5517 100644 --- a/.github/workflows/bdd-interop-tests.yml +++ b/.github/workflows/bdd-interop-tests.yml @@ -26,12 +26,12 @@ jobs: is_release: ${{ steps.check_if_release.outputs.is_release }} steps: - name: checkout-acapy - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 0 - name: Check changed files id: check-changed-files - uses: tj-actions/changed-files@v46.0.5 + uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5 with: files_yaml: | src: @@ -52,7 +52,7 @@ jobs: id: check_if_release - name: Request GitHub API for PR data if: steps.check-if-src-changed.outputs.run_tests != 'false' - uses: octokit/request-action@v2.x + uses: octokit/request-action@05a2312de9f8207044c4c9e41fe19703986acc13 # v2.x env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} id: get_pr_data diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 324cf15550..49ebb1204e 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -17,13 +17,13 @@ jobs: steps: - name: Checkout repository - uses: 
actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v3 + uses: github/codeql-action/init@45775bd8235c68ba998cffa5171334d58593da47 # v3.28.15 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + uses: github/codeql-action/analyze@45775bd8235c68ba998cffa5171334d58593da47 # v3.28.15 diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index df8efffebf..8185571eba 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -10,12 +10,12 @@ jobs: name: lint runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: "3.12" - name: Ruff Format and Lint Check - uses: chartboost/ruff-action@v1 + uses: chartboost/ruff-action@e18ae971ccee1b2d7bbef113930f00c670b78da4 # v1.0.0 with: version: 0.11.4 args: "format --check" diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 081109d34d..29153d51a4 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -17,7 +17,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Run Tests uses: ./.github/actions/run-unit-tests @@ -34,7 +34,7 @@ jobs: date: ${{ steps.date.outputs.date }} if: github.repository_owner == 'openwallet-foundation' || github.event_name == 'workflow_dispatch' steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Print Latest Commit run: echo ${{ github.sha }} diff --git a/.github/workflows/pip-audit.yml b/.github/workflows/pip-audit.yml index dfb0c48e9b..f4e57583a3 
100644 --- a/.github/workflows/pip-audit.yml +++ b/.github/workflows/pip-audit.yml @@ -11,14 +11,14 @@ jobs: runs-on: ubuntu-latest if: (github.event_name == 'pull_request' && github.repository == 'openwallet-foundation/acapy') || (github.event_name != 'pull_request') steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: install run: | python -m venv env/ source env/bin/activate python -m pip install --upgrade pip python -m pip install . - - uses: pypa/gh-action-pip-audit@v1.1.0 + - uses: pypa/gh-action-pip-audit@1220774d901786e6f652ae159f7b6bc8fea6d266 # v1.1.0 with: virtual-environment: env/ local: true diff --git a/.github/workflows/pr-tests.yml b/.github/workflows/pr-tests.yml index 0403ca1548..44a6aac572 100644 --- a/.github/workflows/pr-tests.yml +++ b/.github/workflows/pr-tests.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: - name: checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Tests uses: ./.github/actions/run-unit-tests with: diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index 54dc8c7577..0dcf9e1c1a 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -14,13 +14,13 @@ jobs: deploy: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 0 # fetch all commits/branches - - uses: actions/setup-python@v5 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: 3.x - - uses: actions/cache@v4 + - uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 with: key: ${{ github.ref }} path: .cache diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index d059def99b..a759bf8f2d 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ 
-43,20 +43,20 @@ jobs: packages: write steps: - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ inputs.ref || '' }} persist-credentials: false - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 with: cache-binary: false install: true version: latest - name: Build and Cache Image - uses: docker/build-push-action@v6 + uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0 with: push: false context: . @@ -98,20 +98,20 @@ jobs: steps: - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ inputs.ref || '' }} persist-credentials: false - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 with: cache-binary: false install: true version: latest - name: Log in to the GitHub Container Registry - uses: docker/login-action@v3 + uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -124,7 +124,7 @@ jobs: - name: Setup Image Metadata id: meta - uses: docker/metadata-action@v5 + uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0 with: images: | ghcr.io/${{ steps.lower.outputs.owner }}/${{ matrix.image-name }} @@ -132,7 +132,7 @@ jobs: type=raw,value=py${{ matrix.python-version }}-${{ inputs.tag || github.event.release.tag_name }} - name: Publish Image to GHCR.io - uses: docker/build-push-action@v6 + uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0 with: push: true context: . 
diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index c74ef590df..f7d5ffdb12 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -13,9 +13,9 @@ jobs: permissions: id-token: write # IMPORTANT: this permission is mandatory for trusted publishing steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: "3.x" - name: Install build and publish dependencies @@ -26,4 +26,4 @@ jobs: run: | poetry build - name: Publish package distributions to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 + uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # release/v1 diff --git a/.github/workflows/scenario-integration-tests.yml b/.github/workflows/scenario-integration-tests.yml index 115acb1e4c..afaf520d24 100644 --- a/.github/workflows/scenario-integration-tests.yml +++ b/.github/workflows/scenario-integration-tests.yml @@ -24,12 +24,12 @@ jobs: if: (github.repository == 'openwallet-foundation/acapy') && ((github.event_name == 'pull_request' && github.event.pull_request.draft == false) || (github.event_name != 'pull_request')) steps: - name: checkout-acapy - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 0 - name: Check changed files id: check-changed-files - uses: tj-actions/changed-files@v46.0.5 + uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5 with: files_yaml: | scenarios: "scenarios/**/*" @@ -48,7 +48,7 @@ jobs: if: steps.check-if-scenarios-or-src-changed.outputs.run_tests != 'false' run: pipx install poetry id: setup-poetry - - uses: actions/setup-python@v5 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 if: 
steps.check-if-scenarios-or-src-changed.outputs.run_tests != 'false' with: python-version: "3.12" diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 23ebb6f32e..84f1670cf8 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -35,12 +35,12 @@ jobs: steps: - name: "Checkout code" - uses: actions/checkout@v4 # was v4.1.1 - b4ffde65f46336ab88eb53be808477a3936bae11 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@v2.4.1 # was v2.3.1 - 0864cf19026789058feabb7e87baa5f140aac736 + uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1 with: results_file: results.sarif results_format: sarif @@ -62,7 +62,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. - name: "Upload artifact" - uses: actions/upload-artifact@v4 # was v3.pre.node20 97a0fba1372883ab732affbe8f94b823f91727db + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: SARIF file path: results.sarif @@ -71,6 +71,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard (optional). 
# Commenting out will disable upload of results to your repo's Code Scanning dashboard - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@v3 # was v3.24.9 - 1b1aada464948af03b950897e5eb522f92603cc2 + uses: github/codeql-action/upload-sarif@45775bd8235c68ba998cffa5171334d58593da47 # v3.28.15 with: sarif_file: results.sarif \ No newline at end of file diff --git a/.github/workflows/snyk-lts.yml b/.github/workflows/snyk-lts.yml index 901a3aa185..b26a8371c0 100644 --- a/.github/workflows/snyk-lts.yml +++ b/.github/workflows/snyk-lts.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest if: ${{ github.repository_owner == 'openwallet-foundation' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Build a Docker image run: docker build -t acapy-agent -f docker/Dockerfile . @@ -25,7 +25,7 @@ jobs: # Snyk can be used to break the build when it detects vulnerabilities. # In this case we want to upload the issues to GitHub Code Scanning continue-on-error: true - uses: snyk/actions/docker@0.4.0 + uses: snyk/actions/docker@b98d498629f1c368650224d6d212bf7dfa89e4bf # 0.4.0 env: # In order to use the Snyk Action you will need to have a Snyk API token. 
# More details in https://github.com/snyk/actions#getting-your-snyk-token @@ -46,6 +46,6 @@ jobs: sed -i 's/"security-severity": "null"/"security-severity": "0"/g' snyk.sarif - name: Upload result to GitHub Code Scanning - uses: github/codeql-action/upload-sarif@v3 + uses: github/codeql-action/upload-sarif@45775bd8235c68ba998cffa5171334d58593da47 # v3.28.15 with: sarif_file: snyk.sarif diff --git a/.github/workflows/snyk.yml b/.github/workflows/snyk.yml index a6717023e4..778c019c77 100644 --- a/.github/workflows/snyk.yml +++ b/.github/workflows/snyk.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest if: ${{ github.repository_owner == 'openwallet-foundation' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Build a Docker image run: docker build -t acapy-agent -f docker/Dockerfile . @@ -21,7 +21,7 @@ jobs: # Snyk can be used to break the build when it detects vulnerabilities. # In this case we want to upload the issues to GitHub Code Scanning continue-on-error: true - uses: snyk/actions/docker@0.4.0 + uses: snyk/actions/docker@b98d498629f1c368650224d6d212bf7dfa89e4bf # 0.4.0 env: # In order to use the Snyk Action you will need to have a Snyk API token. 
# More details in https://github.com/snyk/actions#getting-your-snyk-token @@ -39,6 +39,6 @@ jobs: sed -i 's/"security-severity": "null"/"security-severity": "0"/g' snyk.sarif - name: Upload result to GitHub Code Scanning - uses: github/codeql-action/upload-sarif@v3 + uses: github/codeql-action/upload-sarif@45775bd8235c68ba998cffa5171334d58593da47 # v3.28.15 with: sarif_file: snyk.sarif diff --git a/.github/workflows/sonar-merge-main.yml b/.github/workflows/sonar-merge-main.yml index d8390b56e6..a2340cf709 100644 --- a/.github/workflows/sonar-merge-main.yml +++ b/.github/workflows/sonar-merge-main.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest if: github.repository == 'openwallet-foundation/acapy' steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 0 - name: Tests @@ -20,7 +20,7 @@ jobs: os: "ubuntu-latest" is_pr: "false" - name: SonarCloud Scan - uses: SonarSource/sonarqube-scan-action@master + uses: SonarSource/sonarqube-scan-action@aa494459d7c39c106cc77b166de8b4250a32bb97 # master env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} diff --git a/.github/workflows/sonar-pr.yml b/.github/workflows/sonar-pr.yml index 2d83291bce..4bbcc04619 100644 --- a/.github/workflows/sonar-pr.yml +++ b/.github/workflows/sonar-pr.yml @@ -11,28 +11,28 @@ jobs: runs-on: ubuntu-latest if: github.event.workflow_run.conclusion == 'success' && github.repository == 'openwallet-foundation/acapy' steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 0 - name: Download PR number artifact - uses: dawidd6/action-download-artifact@v9 + uses: dawidd6/action-download-artifact@07ab29fd4a977ae4d2b275087cf67563dfdf0295 # v9 with: workflow: Tests run_id: ${{ github.event.workflow_run.id }} name: PR_NUMBER - name: Read PR_NUMBER id: pr_number - uses: juliangruber/read-file-action@v1 + uses: 
juliangruber/read-file-action@b549046febe0fe86f8cb4f93c24e284433f9ab58 # v1.1.7 with: path: ./PR_NUMBER - name: Download Test Coverage - uses: dawidd6/action-download-artifact@v9 + uses: dawidd6/action-download-artifact@07ab29fd4a977ae4d2b275087cf67563dfdf0295 # v9 with: workflow: Tests run_id: ${{ github.event.workflow_run.id }} name: TEST_COV - name: Request GitHub API for PR data - uses: octokit/request-action@v2.x + uses: octokit/request-action@05a2312de9f8207044c4c9e41fe19703986acc13 # v2.x id: get_pr_data with: route: GET /repos/${{ github.event.repository.full_name }}/pulls/${{ steps.pr_number.outputs.content }} @@ -52,7 +52,7 @@ jobs: git checkout -B temp-branch-for-scanning upstream/${{ fromJson(steps.get_pr_data.outputs.data).head.ref }} - name: SonarCloud Scan - uses: SonarSource/sonarqube-scan-action@master + uses: SonarSource/sonarqube-scan-action@aa494459d7c39c106cc77b166de8b4250a32bb97 # master env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} diff --git a/demo/docker-test/db/Dockerfile b/demo/docker-test/db/Dockerfile index bb7277f18a..aed8acce6f 100644 --- a/demo/docker-test/db/Dockerfile +++ b/demo/docker-test/db/Dockerfile @@ -1,3 +1,3 @@ -FROM postgres:17 +FROM postgres:17@sha256:fe3f571d128e8efadcd8b2fde0e2b73ebab6dbec33f6bfe69d98c682c7d8f7bd COPY ./init-postgres-role.sh /docker-entrypoint-initdb.d/init-postgres-role.sh CMD ["docker-entrypoint.sh", "postgres"] \ No newline at end of file diff --git a/demo/elk-stack/extensions/curator/Dockerfile b/demo/elk-stack/extensions/curator/Dockerfile index ec4314dcce..69215e440f 100644 --- a/demo/elk-stack/extensions/curator/Dockerfile +++ b/demo/elk-stack/extensions/curator/Dockerfile @@ -1,4 +1,4 @@ -FROM untergeek/curator:8.0.16 +FROM untergeek/curator:8.0.16@sha256:8ab15516eb320bddb042c6da3c81b57e4e69a7aac04efc32190db979fe0bfb5b USER root diff --git a/demo/elk-stack/extensions/logspout/Dockerfile b/demo/elk-stack/extensions/logspout/Dockerfile index 
9591df53b0..64d6fd56b9 100644 --- a/demo/elk-stack/extensions/logspout/Dockerfile +++ b/demo/elk-stack/extensions/logspout/Dockerfile @@ -1,5 +1,5 @@ # uses ONBUILD instructions described here: # https://github.com/gliderlabs/logspout/tree/master/custom -FROM gliderlabs/logspout:master +FROM gliderlabs/logspout:master@sha256:2d81c026e11ac67f7887029dbfd7d36ee986d946066b45c1dabd966278eb5681 ENV SYSLOG_FORMAT rfc3164 diff --git a/scenarios/Dockerfile b/scenarios/Dockerfile index 383137450a..0c0a5ce285 100644 --- a/scenarios/Dockerfile +++ b/scenarios/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10 +FROM python:3.10@sha256:e2c7fb05741c735679b26eda7dd34575151079f8c615875fbefe401972b14d85 WORKDIR /usr/src/app/ From 15a17bf4df95811d093d85fd96ab11deabcbede5 Mon Sep 17 00:00:00 2001 From: Mourits de Beer <31511766+ff137@users.noreply.github.com> Date: Wed, 23 Apr 2025 17:59:42 +0200 Subject: [PATCH 18/32] :construction_worker: Update dependabot file (#3669) Flattens duplication by specifying "directories". 
Adds `/scenarios` directory to pip and docker management Signed-off-by: ff137 --- .github/dependabot.yml | 217 +++++------------------------------------ 1 file changed, 26 insertions(+), 191 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 6fa45ac069..7ff611cdb7 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -15,19 +15,12 @@ updates: # Maintain dependencies for Python Packages - package-ecosystem: "pip" - directory: "/" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - ignore: - - dependency-name: "*" - update-types: ["version-update:semver-patch"] - - # Maintain dependencies for Python Packages - - package-ecosystem: "pip" - directory: "/demo/playground/examples" + directories: + - "/" + - "/demo" + - "/demo/playground/examples" + - "/docs" + - "/scenarios" schedule: interval: "weekly" day: "monday" @@ -37,186 +30,28 @@ updates: - dependency-name: "*" update-types: ["version-update:semver-patch"] - # Maintain dependencies for Python Packages - - package-ecosystem: "pip" - directory: "/demo" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - ignore: - - dependency-name: "*" - update-types: ["version-update:semver-patch"] - - # Maintain dependencies for Python Packages - - package-ecosystem: "pip" - directory: "/docs" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - ignore: - - dependency-name: "*" - update-types: ["version-update:semver-patch"] - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/.devcontainer" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/docker-agent" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - 
package-ecosystem: "docker" - directory: "/demo/docker-test/db" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/elk-stack/elasticsearch" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/elk-stack/extensions/curator" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/elk-stack/extensions/enterprise-search" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/elk-stack/extensions/filebeat" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/elk-stack/extensions/fleet" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/elk-stack/extensions/heartbeat" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/elk-stack/extensions/logspout" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/elk-stack/extensions/metricbeat" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/elk-stack/kibana" - 
schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/elk-stack/logstash" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/elk-stack/setup" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/multi-demo" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/playground" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - - # Maintain dependencies for docker - - package-ecosystem: "docker" - directory: "/demo/playground/examples" - schedule: - interval: "weekly" - day: "monday" - time: "04:00" - timezone: "Canada/Pacific" - # Maintain dependencies for docker - package-ecosystem: "docker" - directory: "/docker" + directories: + - "/.devcontainer" + - "/demo/docker-agent" + - "/demo/docker-test/db" + - "/demo/elk-stack/elasticsearch" + - "/demo/elk-stack/extensions/curator" + - "/demo/elk-stack/extensions/enterprise-search" + - "/demo/elk-stack/extensions/filebeat" + - "/demo/elk-stack/extensions/fleet" + - "/demo/elk-stack/extensions/heartbeat" + - "/demo/elk-stack/extensions/logspout" + - "/demo/elk-stack/extensions/metricbeat" + - "/demo/elk-stack/kibana" + - "/demo/elk-stack/logstash" + - "/demo/elk-stack/setup" + - "/demo/multi-demo" + - "/demo/playground" + - "/demo/playground/examples" + - "/docker" + - "/scenarios" schedule: interval: "weekly" day: "monday" From 7a401b4bfc8c69056268e677fb1848b78064512a Mon Sep 17 00:00:00 2001 From: StepSecurity Bot Date: Wed, 23 Apr 2025 10:12:08 -0700 Subject: 
[PATCH 19/32] :lock: ci: Harden GitHub Actions (#3670) Signed-off-by: StepSecurity Bot Co-authored-by: Stephen Curran --- .github/workflows/codeql.yml | 3 +++ .github/workflows/publish.yml | 3 +++ .github/workflows/pythonpublish.yml | 3 +++ .github/workflows/snyk-lts.yml | 6 ++++++ .github/workflows/snyk.yml | 6 ++++++ 5 files changed, 21 insertions(+) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 49ebb1204e..7890761d49 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -6,6 +6,9 @@ name: "Code scanning - action" schedule: - cron: "0 19 * * 0" +permissions: + contents: read + jobs: CodeQL-Build: # CodeQL runs on ubuntu-latest and windows-latest diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index a759bf8f2d..dc73b56eb2 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -23,6 +23,9 @@ on: required: false type: string +permissions: + contents: read + jobs: build-image: strategy: diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index f7d5ffdb12..0663e4d8ed 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -4,6 +4,9 @@ on: release: types: [created] +permissions: + contents: read + jobs: deploy: runs-on: ubuntu-latest diff --git a/.github/workflows/snyk-lts.yml b/.github/workflows/snyk-lts.yml index b26a8371c0..3be449a16f 100644 --- a/.github/workflows/snyk-lts.yml +++ b/.github/workflows/snyk-lts.yml @@ -11,8 +11,14 @@ on: - acapy_agent/** - docker/** +permissions: + contents: read + jobs: snyk: + permissions: + contents: read # for actions/checkout to fetch code + security-events: write # for github/codeql-action/upload-sarif to upload SARIF results runs-on: ubuntu-latest if: ${{ github.repository_owner == 'openwallet-foundation' }} steps: diff --git a/.github/workflows/snyk.yml b/.github/workflows/snyk.yml index 778c019c77..da7ffd6e52 100644 --- 
a/.github/workflows/snyk.yml +++ b/.github/workflows/snyk.yml @@ -7,8 +7,14 @@ on: - acapy_agent/** - docker/** +permissions: + contents: read + jobs: snyk: + permissions: + contents: read # for actions/checkout to fetch code + security-events: write # for github/codeql-action/upload-sarif to upload SARIF results runs-on: ubuntu-latest if: ${{ github.repository_owner == 'openwallet-foundation' }} steps: From 43175240f13665a9edeabf0079e6bff10bf89c6b Mon Sep 17 00:00:00 2001 From: Mourits de Beer <31511766+ff137@users.noreply.github.com> Date: Wed, 23 Apr 2025 20:11:34 +0200 Subject: [PATCH 20/32] :lock: Update Token Permissions in GitHub Actions (#3678) * :lock: Update Token Permissions in GitHub Actions Signed-off-by: ff137 * :lock: Move packages: write permission to job level Signed-off-by: ff137 --------- Signed-off-by: ff137 --- .github/workflows/bdd-integration-tests.yml | 5 +++++ .github/workflows/bdd-interop-tests.yml | 5 +++++ .github/workflows/format.yml | 5 +++++ .github/workflows/nightly.yml | 7 +++++++ .github/workflows/pr-tests.yml | 5 +++++ .github/workflows/publish-docs.yml | 4 +++- .github/workflows/scenario-integration-tests.yml | 5 +++++ .github/workflows/sonar-merge-main.yml | 4 ++++ .github/workflows/sonar-pr.yml | 5 +++++ 9 files changed, 44 insertions(+), 1 deletion(-) diff --git a/.github/workflows/bdd-integration-tests.yml b/.github/workflows/bdd-integration-tests.yml index 207763804a..1c2db60534 100644 --- a/.github/workflows/bdd-integration-tests.yml +++ b/.github/workflows/bdd-integration-tests.yml @@ -9,6 +9,11 @@ on: - main types: [opened, synchronize, reopened, ready_for_review] +permissions: + contents: read + pull-requests: read + checks: write + concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true diff --git a/.github/workflows/bdd-interop-tests.yml b/.github/workflows/bdd-interop-tests.yml index f82e2f5517..b136a3809a 100644 --- a/.github/workflows/bdd-interop-tests.yml +++ 
b/.github/workflows/bdd-interop-tests.yml @@ -9,6 +9,11 @@ on: - main types: [opened, synchronize, reopened, ready_for_review] +permissions: + contents: read + pull-requests: read + checks: write + concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index 8185571eba..f57ba0c7a4 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -5,6 +5,11 @@ name: Ruff Code Formatter and Linting Check branches: - main +permissions: + contents: read + pull-requests: read + checks: write + jobs: lint: name: lint diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 29153d51a4..4896c6a733 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -5,6 +5,11 @@ on: - cron: "0 0 * * *" workflow_dispatch: +permissions: + contents: read + pull-requests: read + checks: write + jobs: tests: if: github.repository_owner == 'openwallet-foundation' || github.event_name == 'workflow_dispatch' @@ -54,5 +59,7 @@ jobs: strategy: matrix: tag: ["nightly-${{needs.setup_and_check_pub.outputs.date}}", nightly] + permissions: + packages: write with: tag: ${{ matrix.tag }} diff --git a/.github/workflows/pr-tests.yml b/.github/workflows/pr-tests.yml index 44a6aac572..f90ea3387f 100644 --- a/.github/workflows/pr-tests.yml +++ b/.github/workflows/pr-tests.yml @@ -3,6 +3,11 @@ name: PR Tests on: pull_request: +permissions: + contents: read + pull-requests: read + checks: write + concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index 0dcf9e1c1a..7268cc8df0 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -8,11 +8,13 @@ on: - docs-v* permissions: - contents: write + contents: read jobs: deploy: runs-on: ubuntu-latest + permissions: + contents: write steps: - uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: diff --git a/.github/workflows/scenario-integration-tests.yml b/.github/workflows/scenario-integration-tests.yml index afaf520d24..5008a3947d 100644 --- a/.github/workflows/scenario-integration-tests.yml +++ b/.github/workflows/scenario-integration-tests.yml @@ -9,6 +9,11 @@ on: - main types: [opened, synchronize, reopened, ready_for_review] +permissions: + contents: read + pull-requests: read + checks: write + concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true diff --git a/.github/workflows/sonar-merge-main.yml b/.github/workflows/sonar-merge-main.yml index a2340cf709..3e1f97ccfd 100644 --- a/.github/workflows/sonar-merge-main.yml +++ b/.github/workflows/sonar-merge-main.yml @@ -4,6 +4,10 @@ on: branches: - main +permissions: + contents: read + checks: write + jobs: sonarcloud: name: SonarCloud diff --git a/.github/workflows/sonar-pr.yml b/.github/workflows/sonar-pr.yml index 4bbcc04619..175176126a 100644 --- a/.github/workflows/sonar-pr.yml +++ b/.github/workflows/sonar-pr.yml @@ -6,6 +6,11 @@ on: types: - completed +permissions: + contents: read + pull-requests: read + checks: write + jobs: SonarCloud: runs-on: ubuntu-latest From c4e52047c3dfcb4023eb3d338a66a20fc326b7af Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Apr 2025 17:28:46 -0700 Subject: [PATCH 21/32] chore(deps): Bump pydantic from 2.10.3 to 2.11.3 in /scenarios (#3677) --- scenarios/poetry.lock | 288 +++++++++++++++++++++++---------------- scenarios/pyproject.toml | 2 +- 2 files changed, 171 insertions(+), 119 deletions(-) diff --git a/scenarios/poetry.lock b/scenarios/poetry.lock index 3a6524d48f..d86151d2ac 100644 --- a/scenarios/poetry.lock +++ b/scenarios/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. 
+# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. [[package]] name = "acapy-controller" @@ -6,6 +6,7 @@ version = "0.2.0" description = "ACA-Py Controller" optional = false python-versions = "^3.10" +groups = ["main"] files = [] develop = false @@ -29,6 +30,7 @@ version = "2.4.4" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8"}, {file = "aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745"}, @@ -40,6 +42,7 @@ version = "3.11.10" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cbad88a61fa743c5d283ad501b01c153820734118b65aee2bd7dbb735475ce0d"}, {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80886dac673ceaef499de2f393fc80bb4481a129e6cb29e624a12e3296cc088f"}, @@ -130,7 +133,7 @@ propcache = ">=0.2.0" yarl = ">=1.17.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aiosignal" @@ -138,6 +141,7 @@ version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, @@ -152,6 +156,7 @@ version = 
"0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -163,6 +168,7 @@ version = "0.1.1" description = "Async queue with selective retrieval" optional = false python-versions = "<4.0,>=3.9" +groups = ["main"] files = [ {file = "async_selective_queue-0.1.1-py3-none-any.whl", hash = "sha256:b06af83a09d0fbfc1ac115f68e7e6b879823de71a1749e4a439f44903ae9cfb7"}, {file = "async_selective_queue-0.1.1.tar.gz", hash = "sha256:f59e78ef1703a8781ecd1fff103f4692b3fb8885419430b7c735ee45e5909632"}, @@ -174,6 +180,8 @@ version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.11\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, @@ -185,18 +193,19 @@ version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", 
"pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and 
python_version < \"3.13\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\""] [[package]] name = "blessings" @@ -204,6 +213,7 @@ version = "1.7" description = "A thin, practical wrapper around terminal coloring, styling, and positioning" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "blessings-1.7-py2-none-any.whl", hash = "sha256:caad5211e7ba5afe04367cdd4cfc68fa886e2e08f6f35e76b7387d2109ccea6e"}, {file = "blessings-1.7-py3-none-any.whl", hash = "sha256:b1fdd7e7a675295630f9ae71527a8ebc10bfefa236b3d6aa4932ee4462c17ba3"}, @@ -219,6 +229,7 @@ version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, @@ -230,6 +241,7 @@ version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" +groups = ["main"] files = [ {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, @@ -344,6 +356,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -355,6 +369,7 @@ version = "7.1.0" description = "A Python library for the Docker Engine API." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, @@ -377,6 +392,8 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -391,6 +408,7 @@ version = "1.5.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, @@ -492,6 +510,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = 
"sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -506,6 +525,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -517,6 +537,7 @@ version = "6.1.0" description = "multidict implementation" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -621,6 +642,7 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -632,6 +654,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -647,6 +670,7 @@ version = "0.2.1" description = "Accelerated property cache" optional = false python-versions = ">=3.9" +groups = ["main"] files 
= [ {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, @@ -734,131 +758,133 @@ files = [ [[package]] name = "pydantic" -version = "2.10.3" +version = "2.11.3" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, - {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, + {file = "pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f"}, + {file = "pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.27.1" +pydantic-core = "2.33.1" typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" -version = "2.27.1" +version = "2.33.1" description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, - {file = 
"pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, - {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, - {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, - {file = 
"pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, - {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, - {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, - {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = 
"sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, - {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = 
"sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, - {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, - {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = 
"sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, - {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, - {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, - {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, - {file = 
"pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, - {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, - {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, - {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, - {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, - 
{file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, - {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = 
"sha256:3077cfdb6125cc8dab61b155fdd714663e401f0e6883f9632118ec12cf42df26"}, + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ffab8b2908d152e74862d276cf5017c81a2f3719f14e8e3e8d6b83fda863927"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5183e4f6a2d468787243ebcd70cf4098c247e60d73fb7d68d5bc1e1beaa0c4db"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:398a38d323f37714023be1e0285765f0a27243a8b1506b7b7de87b647b517e48"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87d3776f0001b43acebfa86f8c64019c043b55cc5a6a2e313d728b5c95b46969"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c566dd9c5f63d22226409553531f89de0cac55397f2ab8d97d6f06cfce6d947e"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d5f3acc81452c56895e90643a625302bd6be351e7010664151cc55b7b97f89"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3a07fadec2a13274a8d861d3d37c61e97a816beae717efccaa4b36dfcaadcde"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f99aeda58dce827f76963ee87a0ebe75e648c72ff9ba1174a253f6744f518f65"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:902dbc832141aa0ec374f4310f1e4e7febeebc3256f00dc359a9ac3f264a45dc"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fe44d56aa0b00d66640aa84a3cbe80b7a3ccdc6f0b1ca71090696a6d4777c091"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win32.whl", hash = "sha256:ed3eb16d51257c763539bde21e011092f127a2202692afaeaccb50db55a31383"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:694ad99a7f6718c1a498dc170ca430687a39894a60327f548e02a9c7ee4b6504"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e966fc3caaf9f1d96b349b0341c70c8d6573bf1bac7261f7b0ba88f96c56c24"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91815221101ad3c6b507804178a7bb5cb7b2ead9ecd600041669c8d805ebd595"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9fea9c1869bb4742d174a57b4700c6dadea951df8b06de40c2fedb4f02931c2e"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d20eb4861329bb2484c021b9d9a977566ab16d84000a57e28061151c62b349a"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb935c5591573ae3201640579f30128ccc10739b45663f93c06796854405505"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c964fd24e6166420d18fb53996d8c9fd6eac9bf5ae3ec3d03015be4414ce497f"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:681d65e9011f7392db5aa002b7423cc442d6a673c635668c227c6c8d0e5a4f77"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e100c52f7355a48413e2999bfb4e139d2977a904495441b374f3d4fb4a170961"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:048831bd363490be79acdd3232f74a0e9951b11b2b4cc058aeb72b22fdc3abe1"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bdc84017d28459c00db6f918a7272a5190bec3090058334e43a76afb279eac7c"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win32.whl", hash = 
"sha256:32cd11c5914d1179df70406427097c7dcde19fddf1418c787540f4b730289896"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_amd64.whl", hash = "sha256:2ea62419ba8c397e7da28a9170a16219d310d2cf4970dbc65c32faf20d828c83"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_arm64.whl", hash = "sha256:fc903512177361e868bc1f5b80ac8c8a6e05fcdd574a5fb5ffeac5a9982b9e89"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = 
"sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5ab77f45d33d264de66e1884fca158bc920cb5e27fd0764a72f72f5756ae8bdb"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7aaba1b4b03aaea7bb59e1b5856d734be011d3e6d98f5bcaa98cb30f375f2ad"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fb66263e9ba8fea2aa85e1e5578980d127fb37d7f2e292773e7bc3a38fb0c7b"}, + {file = 
"pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f2648b9262607a7fb41d782cc263b48032ff7a03a835581abbf7a3bec62bcf5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:723c5630c4259400818b4ad096735a829074601805d07f8cafc366d95786d331"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d100e3ae783d2167782391e0c1c7a20a31f55f8015f3293647544df3f9c67824"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177d50460bc976a0369920b6c744d927b0ecb8606fb56858ff542560251b19e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3edde68d1a1f9af1273b2fe798997b33f90308fb6d44d8550c89fc6a3647cf6"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a62c3c3ef6a7e2c45f7853b10b5bc4ddefd6ee3cd31024754a1a5842da7d598d"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:c91dbb0ab683fa0cd64a6e81907c8ff41d6497c346890e26b23de7ee55353f96"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f466e8bf0a62dc43e068c12166281c2eca72121dd2adc1040f3aa1e21ef8599"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win32.whl", hash = "sha256:ab0277cedb698749caada82e5d099dc9fed3f906a30d4c382d1a21725777a1e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win_amd64.whl", hash = "sha256:5773da0ee2d17136b1f1c6fbde543398d452a6ad2a7b54ea1033e2daa739b8d2"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c834f54f8f4640fd7e4b193f80eb25a0602bba9e19b3cd2fc7ffe8199f5ae02"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:049e0de24cf23766f12cc5cc71d8abc07d4a9deb9061b334b62093dedc7cb068"}, + {file = 
"pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a28239037b3d6f16916a4c831a5a0eadf856bdd6d2e92c10a0da3a59eadcf3e"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d3da303ab5f378a268fa7d45f37d7d85c3ec19769f28d2cc0c61826a8de21fe"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25626fb37b3c543818c14821afe0fd3830bc327a43953bc88db924b68c5723f1"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3ab2d36e20fbfcce8f02d73c33a8a7362980cff717926bbae030b93ae46b56c7"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:2f9284e11c751b003fd4215ad92d325d92c9cb19ee6729ebd87e3250072cdcde"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:048c01eee07d37cbd066fc512b9d8b5ea88ceeb4e629ab94b3e56965ad655add"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5ccd429694cf26af7997595d627dd2637e7932214486f55b8a357edaac9dae8c"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3a371dc00282c4b84246509a5ddc808e61b9864aa1eae9ecc92bb1268b82db4a"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f59295ecc75a1788af8ba92f2e8c6eeaa5a94c22fc4d151e8d9638814f85c8fc"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08530b8ac922003033f399128505f513e30ca770527cc8bbacf75a84fcc2c74b"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae370459da6a5466978c0eacf90690cb57ec9d533f8e63e564ef3822bfa04fe"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:e3de2777e3b9f4d603112f78006f4ae0acb936e95f06da6cb1a45fbad6bdb4b5"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a64e81e8cba118e108d7126362ea30e021291b7805d47e4896e52c791be2761"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:52928d8c1b6bda03cc6d811e8923dffc87a2d3c8b3bfd2ce16471c7147a24850"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1b30d92c9412beb5ac6b10a3eb7ef92ccb14e3f2a8d7732e2d739f58b3aa7544"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f995719707e0e29f0f41a8aa3bcea6e761a36c9136104d3189eafb83f5cec5e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7edbc454a29fc6aeae1e1eecba4f07b63b8d76e76a748532233c4c167b4cb9ea"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ad05b683963f69a1d5d2c2bdab1274a31221ca737dbbceaa32bcb67359453cdd"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df6a94bf9452c6da9b5d76ed229a5683d0306ccb91cca8e1eea883189780d568"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7965c13b3967909a09ecc91f21d09cfc4576bf78140b988904e94f130f188396"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3f1fdb790440a34f6ecf7679e1863b825cb5ffde858a9197f851168ed08371e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5277aec8d879f8d05168fdd17ae811dd313b8ff894aeeaf7cd34ad28b4d77e33"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8ab581d3530611897d863d1a649fb0644b860286b4718db919bfd51ece41f10b"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:0483847fa9ad5e3412265c1bd72aad35235512d9ce9d27d81a56d935ef489672"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:de9e06abe3cc5ec6a2d5f75bc99b0bdca4f5c719a5b34026f8c57efbdecd2ee3"}, + {file = "pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df"}, ] [package.dependencies] @@ -870,6 +896,7 @@ version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -892,6 +919,7 @@ version = "0.23.8" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, @@ -910,6 +938,8 @@ version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, @@ -937,6 +967,7 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -958,6 +989,7 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -969,6 +1001,8 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.11\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -1010,24 +1044,41 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "typing-inspection" +version = "0.4.0" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, + {file = 
"typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + [[package]] name = "urllib3" version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -1038,6 +1089,7 @@ version = "1.18.3" description = "Yet another URL library" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, @@ -1129,6 +1181,6 @@ multidict = ">=4.0" propcache = ">=0.2.0" [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.10" -content-hash = "55d68b2c393cedba85a71e3adb0349c627f436aacb06bd7682bcd459416869e1" +content-hash = "43e080b9604689150145f4ed974571cb1935792cfe01673cef0281404e319bf2" diff --git a/scenarios/pyproject.toml b/scenarios/pyproject.toml index d9f76270a1..c2a165903a 100644 --- a/scenarios/pyproject.toml +++ b/scenarios/pyproject.toml @@ -12,7 +12,7 @@ acapy-controller = {git = "https://github.com/indicio-tech/acapy-minimal-example docker = "7.1.0" pytest = "^8.3.2" 
pytest-asyncio = "^0.23.8" -pydantic = "^2.8.2" +pydantic = "^2.11.3" [tool.pytest.ini_options] markers = "examples: test the examples" From 476326a82eb1adef8a35caffbe2f4a0990506283 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Apr 2025 19:03:51 -0700 Subject: [PATCH 22/32] chore(deps): Bump pytest-asyncio from 0.23.8 to 0.26.0 in /scenarios (#3676) --- scenarios/poetry.lock | 14 +++++++------- scenarios/pyproject.toml | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/scenarios/poetry.lock b/scenarios/poetry.lock index d86151d2ac..276307c13e 100644 --- a/scenarios/poetry.lock +++ b/scenarios/poetry.lock @@ -915,21 +915,21 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments [[package]] name = "pytest-asyncio" -version = "0.23.8" +version = "0.26.0" description = "Pytest support for asyncio" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, - {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, + {file = "pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0"}, + {file = "pytest_asyncio-0.26.0.tar.gz", hash = "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f"}, ] [package.dependencies] -pytest = ">=7.0.0,<9" +pytest = ">=8.2,<9" [package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] @@ -1183,4 +1183,4 @@ propcache = ">=0.2.0" [metadata] lock-version = "2.1" python-versions = "^3.10" -content-hash = "43e080b9604689150145f4ed974571cb1935792cfe01673cef0281404e319bf2" +content-hash 
= "8be946dfd9b4b99ac4a37e3eed8db1a742ae9d2b621104420439d8b05d3de017" diff --git a/scenarios/pyproject.toml b/scenarios/pyproject.toml index c2a165903a..8adccc61f8 100644 --- a/scenarios/pyproject.toml +++ b/scenarios/pyproject.toml @@ -11,7 +11,7 @@ python = "^3.10" acapy-controller = {git = "https://github.com/indicio-tech/acapy-minimal-example.git", rev = "main"} docker = "7.1.0" pytest = "^8.3.2" -pytest-asyncio = "^0.23.8" +pytest-asyncio = "^0.26.0" pydantic = "^2.11.3" [tool.pytest.ini_options] From 4898cce1b28fc619a5c6a2b8126f29f183cac622 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 24 Apr 2025 02:10:12 +0000 Subject: [PATCH 23/32] chore(deps): Bump github/codeql-action in the all-actions group (#3675) --- .github/workflows/codeql.yml | 4 ++-- .github/workflows/scorecard.yml | 2 +- .github/workflows/snyk-lts.yml | 2 +- .github/workflows/snyk.yml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 7890761d49..4e95f9cc57 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,9 +24,9 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@45775bd8235c68ba998cffa5171334d58593da47 # v3.28.15 + uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@45775bd8235c68ba998cffa5171334d58593da47 # v3.28.15 + uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16 diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 84f1670cf8..7d939213be 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -71,6 +71,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard (optional). 
# Commenting out will disable upload of results to your repo's Code Scanning dashboard - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@45775bd8235c68ba998cffa5171334d58593da47 # v3.28.15 + uses: github/codeql-action/upload-sarif@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16 with: sarif_file: results.sarif \ No newline at end of file diff --git a/.github/workflows/snyk-lts.yml b/.github/workflows/snyk-lts.yml index 3be449a16f..129af25f40 100644 --- a/.github/workflows/snyk-lts.yml +++ b/.github/workflows/snyk-lts.yml @@ -52,6 +52,6 @@ jobs: sed -i 's/"security-severity": "null"/"security-severity": "0"/g' snyk.sarif - name: Upload result to GitHub Code Scanning - uses: github/codeql-action/upload-sarif@45775bd8235c68ba998cffa5171334d58593da47 # v3.28.15 + uses: github/codeql-action/upload-sarif@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16 with: sarif_file: snyk.sarif diff --git a/.github/workflows/snyk.yml b/.github/workflows/snyk.yml index da7ffd6e52..8b132f3819 100644 --- a/.github/workflows/snyk.yml +++ b/.github/workflows/snyk.yml @@ -45,6 +45,6 @@ jobs: sed -i 's/"security-severity": "null"/"security-severity": "0"/g' snyk.sarif - name: Upload result to GitHub Code Scanning - uses: github/codeql-action/upload-sarif@45775bd8235c68ba998cffa5171334d58593da47 # v3.28.15 + uses: github/codeql-action/upload-sarif@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16 with: sarif_file: snyk.sarif From f685116160985e294f57f9adb5d2e2fc71fbae57 Mon Sep 17 00:00:00 2001 From: Mourits de Beer <31511766+ff137@users.noreply.github.com> Date: Sun, 27 Apr 2025 16:38:02 +0200 Subject: [PATCH 24/32] :bug: Fix permissions in nightly publish job (#3682) --- .github/workflows/nightly.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 4896c6a733..1ac803a373 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -60,6 +60,7 @@ jobs: matrix: tag: 
["nightly-${{needs.setup_and_check_pub.outputs.date}}", nightly] permissions: + contents: read packages: write with: tag: ${{ matrix.tag }} From 13eb00b5f801ba532a8d0b4e1e4a802c3856d52d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Apr 2025 09:15:33 -0700 Subject: [PATCH 25/32] chore(deps-dev): Bump pydevd-pycharm from 251.23774.211 to 251.25410.24 (#3683) Bumps [pydevd-pycharm](https://github.com/JetBrains/intellij-community) from 251.23774.211 to 251.25410.24. - [Commits](https://github.com/JetBrains/intellij-community/compare/pycharm/251.23774.211...pycharm/251.25410.24) --- updated-dependencies: - dependency-name: pydevd-pycharm dependency-version: 251.25410.24 dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7c6dfd89cd..6c1dc39ed0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2263,13 +2263,13 @@ files = [ [[package]] name = "pydevd-pycharm" -version = "251.23774.211" +version = "251.25410.24" description = "PyCharm Debugger (used in PyCharm and PyDev)" optional = false python-versions = "*" groups = ["dev"] files = [ - {file = "pydevd_pycharm-251.23774.211.tar.gz", hash = "sha256:82173214c3f4b12c6e738ccb7406cba312714a5ff46c76682d7880e338b3a9e5"}, + {file = "pydevd_pycharm-251.25410.24.tar.gz", hash = "sha256:54d5995c445a6a1639261589be822c88aa68ceb84c75751ff3d11e39ada19cbd"}, ] [[package]] From 7a4794f76cad994a4cc876c0230cefacc36bf627 Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Mon, 28 Apr 2025 11:15:51 -0700 Subject: [PATCH 26/32] 1.3.0rc2 (#3687) Signed-off-by: Stephen Curran --- CHANGELOG.md | 70 +++++++++++++--- Managing-ACA-Py-Doc-Site.md | 2 +- PUBLISHING.md | 4 +- ...ion.md => AnonCredsControllerMigration.md} | 0 
docs/features/SupportedRFCs.md | 2 +- mkdocs.yml | 2 +- open-api/openapi.json | 82 +++++++++---------- open-api/swagger.json | 78 +++++++++--------- pyproject.toml | 2 +- 9 files changed, 144 insertions(+), 98 deletions(-) rename docs/deploying/{AnoncredsControllerMigration.md => AnonCredsControllerMigration.md} (100%) diff --git a/CHANGELOG.md b/CHANGELOG.md index d0a69e0d9b..aa5f9d9af2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,14 +1,28 @@ # Aries Cloud Agent Python Changelog -## 1.3.0rc1 +## 1.3.0rc2 -### April 3, 2025 +### April 28, 2025 -Release 1.3.0 is a significant release that adds many updates, fixes and an important breaking change (starting to remove support for [AIP 1.0] from ACA-Py) from the 1.2.LTS branch of ACA-Py. The full list of changes are in in the [categorized list of pull requests](#130-categorized-list-of-pull-requests) for the release. As always, ACA-Py remains fully up to date with its dependencies. Fixes and improvements focused around the latest wallet type (`askar-anoncreds`), AnonCreds processing in general, and AnonCreds revocation in particular. New to this release is a ACA-Py Helm Chart that can be used in deploying ACA-Py. +ACA-Py 1.3.0 introduces significant improvements across wallet types, AnonCreds support, multi-tenancy, DIDComm interoperability, developer experience, and software supply chain management. This release strengthens stability, modernizes protocol support, and delivers important updates for AnonCreds credential handling. A small number of breaking changes are included and are detailed below. + +Updates were made to the `askar-anoncreds` wallet type ([Askar](https://github.com/openwallet-foundation/askar) plus the latest [AnonCreds Rust](https://github.com/hyperledger/anoncreds-rs) library), addressing issues with multi-ledger configurations, multitenant deployments, and credential handling across different wallet types. 
Wallet profile management was strengthened by enforcing unique names to avoid conflicts in multitenant environments. + +AnonCreds handling saw extensive refinements, including fixes to credential issuance, revocation management, and proof presentation workflows. The release also introduces support for `did:indy` Transaction Version 2 and brings better alignment between the ledger API responses and the expected schemas. Several API documentation updates and improvements to type hints further enhance the developer experience when working with AnonCreds features. + +Support for multi-tenancy continues to mature, with fixes that better isolate tenant wallets from the base wallet and improved connection reuse across tenants. + +Logging across ACA-Py has been significantly improved to deliver clearer, more actionable logs, while error handling was enhanced to provide better diagnostics for validation failures and resolver setup issues. + +Work toward broader interoperability continued, with the introduction of support for the [Verifiable Credentials Data Model (VCDM) 2.0](https://www.w3.org/TR/vc-data-model-2.0/), as well as enhancements to DIDDoc handling, including support for BLS12381G2 key types. A new DIDComm route for fetching existing invitations was added, and a number of minor protocol-level improvements were made to strengthen reliability. + +The release also includes many improvements for developers, including a new ACA-Py Helm Chart to simplify Kubernetes deployments, updated tutorials, and more updates to demos (such as [AliceGetsAPhone](https://aca-py.org/latest/demo/AliceGetsAPhone/)). Dependency upgrades across the project further solidify the platform for long-term use. + +Significant work was also done in this release to improve the security and integrity of ACA-Py's software supply chain. 
Updates to the CI/CD pipelines hardened GitHub Actions workflows, introduced pinned dependencies and digests for builds, optimized Dockerfile construction, and improved dependency management practices. These changes directly contribute to a stronger security posture and have improved [ACA-Py's OpenSSF Scorecard evaluation](https://scorecard.dev/viewer/?uri=github.com/openwallet-foundation/acapy), ensuring higher levels of trust and verifiability for those deploying ACA-Py in production environments. ### 1.3.0 Deprecation Notices -- In the next ACA-Py release, we will be dropping from the core ACA-Py repository the AIP 1.0 [RFC 0037 Issue Credentials v1.0] and [RFC 0037 Present Proof v1.0] DIDComm protocols. Each of the protocols will be moved to the [ACA-Py Plugins] repo. All ACA-Py implementers that use those protocols **SHOULD** update as soon as possible to the [AIP 2.0] versions of those protocols ([RFC 0453 Issue Credential v2.0] and [RFC 0454 Present Proof v2.0], respectively). Once the protocols are removed from ACA-Py, anyone still using those protocols **MUST** adjust their configuration to load those protocols from the respective plugins. +- In the next ACA-Py release, we will be dropping from the core ACA-Py repository the [AIP 1.0] [RFC 0036 Issue Credential v1.0] and [RFC 0037 Present Proof v1.0] DIDComm protocols. Each of the protocols will be moved to the [ACA-Py Plugins] repo. All ACA-Py implementers that use those protocols **SHOULD** update as soon as possible to the [AIP 2.0] versions of those protocols ([RFC 0453 Issue Credential v2.0] and [RFC 0454 Present Proof v2.0], respectively). Once the protocols are removed from ACA-Py, anyone still using those protocols **MUST** adjust their configuration to load those protocols from the respective plugins. 
[ACA-Py Plugins]: https://plugins.aca-py.org [RFC 0160 Connections]: https://identity.foundation/aries-rfcs/latest/features/0160-connection-protocol/ @@ -24,15 +38,19 @@ Release 1.3.0 is a significant release that adds many updates, fixes and an impo ### 1.3.0 Breaking Changes -In this release, the DiDComm [RFC 0160 Connections] is removed, in favour of the newer, more complete [RFC 0434 Out of Band] and [RFC 0023 DID Exchange]. Those still requiring [RFC 0160 Connections] protocol support must update their startup parameters to include the [Connections Protocol Plugin]. See the documentation for details, but once the ACA-Py instance startup options are extended to include the Connections protocol plugin, Controllers using the Connections protocol should continue to work as they had been. That said, we highly recommend implementers move to the [RFC 0434 Out of Band] and [RFC 0023 DID Exchange] Protocols as soon as possible. +This release includes a small number of breaking changes: + +- The DIDComm [RFC 0160 Connections] protocol is removed, in favour of the newer, more complete [RFC 0434 Out of Band] and [RFC 0023 DID Exchange]. Those still requiring [RFC 0160 Connections] protocol support must update their startup parameters to include the [Connections Protocol Plugin]. See the documentation for details, but once the ACA-Py instance startup options are extended to include the Connections protocol plugin, Controllers using the Connections protocol should continue to work as they had been. That said, we highly recommend implementers seeking interoperability move to the [RFC 0434 Out of Band] and [RFC 0023 DID Exchange] Protocols as soon as possible. +- Schema objects related to `did:indy` operations have been renamed to improve clarity and consistency. Clients interacting with `did:indy` endpoints should review and adjust any schema validations or mappings in their applications. 
-### 1.3.0 ACA-Py Controller API Changes: +### 1.3.0 ACA-Py Controller API Changes -- Added: `did:indy` support, including a new `POST /did/indy/create` endpoint -- Routes that support pagination (such as endpoints for fetching connections or credential/presentation exchange records), now include `descending` as an optional query parameter. -- `validFrom` and `validUntil` added to the `Credential` and `VerifiableCredential` objects +- `did:indy` support added, including a new `POST /did/indy/create` endpoint. +- Routes that support pagination (such as endpoints for fetching connections or credential/presentation exchange records), now include `descending` as an optional query parameter and have deprecated the `count` and `start` query parameters in favor of the more standard `limit` and `offset` parameters. +- `validFrom` and `validUntil` added to the `Credential` and `VerifiableCredential` objects. +- For consistency (and developer sanity), all `Anoncreds` references in the ACA-Py codebase have been changed to the more common `AnonCreds` (see [PR \#3573](https://github.com/openwallet-foundation/acapy/pull/3573)). Controller references may have to be updated to reflect the update. -Specifics of the majority of the can be found by looking at the diffs for the `swagger.json` and `openapi.json` files that are part of the [1.3.0.rc Release Pull Request](https://github.com/openwallet-foundation/acapy/pull/3604). Later pull requests might introduce some additional changes. +Specifics of the majority of the changes can be found by looking at the diffs for the `swagger.json` and `openapi.json` files that are part of the [1.3.0 Release Pull Request](https://github.com/openwallet-foundation/acapy/pull/3604). Later pull requests might introduce some additional changes. 
### 1.3.0 Categorized List of Pull Requests @@ -46,6 +64,10 @@ Specifics of the majority of the can be found by looking at the diffs for the `s - :art: Deprecate count/start query params and implement limit/offset [\#3208](https://github.com/openwallet-foundation/acapy/pull/3208) [ff137](https://github.com/ff137) - :sparkles: Add ordering options to askar scan and fetch_all methods [\#3173](https://github.com/openwallet-foundation/acapy/pull/3173) [ff137](https://github.com/ff137) - Updates/fixes to AnonCreds Processing + - :art: Fix swagger tag names for AnonCreds endpoints [\#3661](https://github.com/openwallet-foundation/acapy/pull/3661) [ff137](https://github.com/ff137) + - :art: Add type hints to anoncreds module [\#3652](https://github.com/openwallet-foundation/acapy/pull/3652) [ff137](https://github.com/ff137) + - :bug: Fix publishing all pending AnonCreds revocations [\#3626](https://github.com/openwallet-foundation/acapy/pull/3626) [ff137](https://github.com/ff137) + - :art: Rename Anoncreds to AnonCreds [\#3573](https://github.com/openwallet-foundation/acapy/pull/3573) [ff137](https://github.com/ff137) - :art: Use correct model for sending AnonCreds presentation [\#3618](https://github.com/openwallet-foundation/acapy/pull/3618) [ff137](https://github.com/ff137) - fix: align ledger config schema with API response [\#3615](https://github.com/openwallet-foundation/acapy/pull/3615) [MonolithicMonk](https://github.com/MonolithicMonk) - fix(ledger): correct response format for /ledger/get-write-ledgers endpoint [\#3613](https://github.com/openwallet-foundation/acapy/pull/3613) [MonolithicMonk](https://github.com/MonolithicMonk) @@ -66,16 +88,29 @@ Specifics of the majority of the can be found by looking at the diffs for the `s - fix: connection reuse with multi-tenancy [\#3543](https://github.com/openwallet-foundation/acapy/pull/3543) [dbluhm](https://github.com/dbluhm) - Remove base wallet type must be new wallet type restriction 
[\#3542](https://github.com/openwallet-foundation/acapy/pull/3542) [jamshale](https://github.com/jamshale) - Logging and Error Handling Updates and Fixes: + - :art: Replace print statements in Banner with info log [\#3643](https://github.com/openwallet-foundation/acapy/pull/3643) [ff137](https://github.com/ff137) + - :sparkles: Improve logging in core components [\#3332](https://github.com/openwallet-foundation/acapy/pull/3332) [ff137](https://github.com/ff137) - :art: Include the validation error in Unprocessable Entity reason [\#3517](https://github.com/openwallet-foundation/acapy/pull/3517) [ff137](https://github.com/ff137) - Catch and log universal resolver setup error [\#3511](https://github.com/openwallet-foundation/acapy/pull/3511) [jamshale](https://github.com/jamshale) - W3C Verifiable Credentials Support Updates and Fixes: - Add vcdm 2.0 model and context [\#3436](https://github.com/openwallet-foundation/acapy/pull/3436) [PatStLouis](https://github.com/PatStLouis) - DID Doc Handling Updates + - (fix) VM resolution strategy correction for embedded VMs [\#3665](https://github.com/openwallet-foundation/acapy/pull/3665) [gmulhearn](https://github.com/gmulhearn) + - :bug: Fix public did no longer being correctly configured [\#3646](https://github.com/openwallet-foundation/acapy/pull/3646) [ff137](https://github.com/ff137) + - :art: Add type hints to `messaging/jsonld` [\#3650](https://github.com/openwallet-foundation/acapy/pull/3650) [ff137](https://github.com/ff137) + - Add BLS12381G2 keys to multikey manager [\#3640](https://github.com/openwallet-foundation/acapy/pull/3640) [gmulhearn](https://github.com/gmulhearn) - (fix) VM resolution strategy correction [\#3622](https://github.com/openwallet-foundation/acapy/pull/3622) [gmulhearn](https://github.com/gmulhearn) - DIDComm Protocol Updates and Fixes: - Fetch existing invitation route [\#3572](https://github.com/openwallet-foundation/acapy/pull/3572) [PatStLouis](https://github.com/PatStLouis) - BREAKING: 
remove connection protocol [\#3184](https://github.com/openwallet-foundation/acapy/pull/3184) [dbluhm](https://github.com/dbluhm) +- Indy Ledger Handling Updates/Fixes + - :art: Make ledger config more readable [\#3664](https://github.com/openwallet-foundation/acapy/pull/3664) [ff137](https://github.com/ff137) + - :art: Rename did:indy create/response schema objects [\#3663](https://github.com/openwallet-foundation/acapy/pull/3663) [ff137](https://github.com/ff137) + - :sparkles: Don't shutdown on ledger error [\#3636](https://github.com/openwallet-foundation/acapy/pull/3636) [ff137](https://github.com/ff137) - Documentation and Tutorial Pull Requests: + - Use current version of aca-py in devcontainer [\#3638](https://github.com/openwallet-foundation/acapy/pull/3638) [esune](https://github.com/esune) + - Devcointainer and docs update [\#3629](https://github.com/openwallet-foundation/acapy/pull/3629) [esune](https://github.com/esune) + - AliceGetsAPhone demo works in local docker environment [\#3623](https://github.com/openwallet-foundation/acapy/pull/3623) [davidchaiken](https://github.com/davidchaiken) - feat(demo): remove broken aip 10 and fix aip 20 [\#3611](https://github.com/openwallet-foundation/acapy/pull/3611) [davidchaiken](https://github.com/davidchaiken) - Fix demo implementation of vc_di cred issue [\#3609](https://github.com/openwallet-foundation/acapy/pull/3609) [ianco](https://github.com/ianco) - chore(demo): remove aip 10 code [\#3619](https://github.com/openwallet-foundation/acapy/pull/3619) [davidchaiken](https://github.com/davidchaiken) @@ -87,7 +122,16 @@ Specifics of the majority of the can be found by looking at the diffs for the `s - Create ReuseConnection.md [\#3534](https://github.com/openwallet-foundation/acapy/pull/3534) [MonolithicMonk](https://github.com/MonolithicMonk) - :white_check_mark: Fix demo playground example tests [\#3531](https://github.com/openwallet-foundation/acapy/pull/3531) [ff137](https://github.com/ff137) - :arrow_up: 
Upgrade sphinx versions in docs [\#3530](https://github.com/openwallet-foundation/acapy/pull/3530) [ff137](https://github.com/ff137) -- ACA-Py Testing Pull Requests: +- ACA-Py Testing and CI/CD Pull Requests: + - :bug: Fix permissions in nightly publish job [\#3682](https://github.com/openwallet-foundation/acapy/pull/3682) [ff137](https://github.com/ff137) + - :lock: Update Token Permissions in GitHub Actions [\#3678](https://github.com/openwallet-foundation/acapy/pull/3678) [ff137](https://github.com/ff137) + - :lock: ci: Harden GitHub Actions [\#3670](https://github.com/openwallet-foundation/acapy/pull/3670) [step-security-bot](https://github.com/step-security-bot) + - :construction_worker: Update dependabot file [\#3669](https://github.com/openwallet-foundation/acapy/pull/3669) [ff137](https://github.com/ff137) + - :pushpin: Pin Actions to a full length commit SHA and image tags to digests [\#3668](https://github.com/openwallet-foundation/acapy/pull/3668) [step-security-bot](https://github.com/step-security-bot) + - :test_tube: Fix test warnings [\#3656](https://github.com/openwallet-foundation/acapy/pull/3656) [ff137](https://github.com/ff137) + - :construction_worker: :technologist: Optimize Docker build to reduce cache invalidation [\#3655](https://github.com/openwallet-foundation/acapy/pull/3655) [rblaine95](https://github.com/rblaine95) + - 👷 Split Docker Builds [\#3654](https://github.com/openwallet-foundation/acapy/pull/3654) [rblaine95](https://github.com/rblaine95) + - :construction_worker: Fix Docker Caching [\#3653](https://github.com/openwallet-foundation/acapy/pull/3653) [rblaine95](https://github.com/rblaine95) - Repair BDD integration release tests [\#3605](https://github.com/openwallet-foundation/acapy/pull/3605) [jamshale](https://github.com/jamshale) - Indicate when interop tests fail [\#3592](https://github.com/openwallet-foundation/acapy/pull/3592) [jamshale](https://github.com/jamshale) - :zap: Automatically use pytest-xdist to run tests in 
parallel [\#3574](https://github.com/openwallet-foundation/acapy/pull/3574) [ff137](https://github.com/ff137) @@ -96,6 +140,7 @@ Specifics of the majority of the can be found by looking at the diffs for the `s - :heavy_plus_sign: Re-add `git` to Dockerfile [\#3515](https://github.com/openwallet-foundation/acapy/pull/3515) [ff137](https://github.com/ff137) - Restore connection route tests [\#3461](https://github.com/openwallet-foundation/acapy/pull/3461) [dbluhm](https://github.com/dbluhm) - Dependency Management pull requests (other than Dependabot): + - :arrow_up: Weekly dependency updates [\#3634](https://github.com/openwallet-foundation/acapy/pull/3634) [ff137](https://github.com/ff137) - Upgrade docker images to release 1.2.4 [\#3597](https://github.com/openwallet-foundation/acapy/pull/3597) [jamshale](https://github.com/jamshale) - Update changed-files to non vulnerable version [\#3591](https://github.com/openwallet-foundation/acapy/pull/3591) [ryjones](https://github.com/ryjones) - :arrow_up: Update lock file [\#3590](https://github.com/openwallet-foundation/acapy/pull/3590) [ff137](https://github.com/ff137) @@ -109,10 +154,11 @@ Specifics of the majority of the can be found by looking at the diffs for the `s - Update dockerfile image after release [\#3469](https://github.com/openwallet-foundation/acapy/pull/3469) [jamshale](https://github.com/jamshale) - :arrow_up: Upgrade dependencies [\#3455](https://github.com/openwallet-foundation/acapy/pull/3455) [ff137](https://github.com/ff137) - Release management pull requests: + - 1.3.0rc2 [\#3687](https://github.com/openwallet-foundation/acapy/pull/3687) [swcurran](https://github.com/swcurran) - 1.3.0rc1 [\#3628](https://github.com/openwallet-foundation/acapy/pull/3628) [swcurran](https://github.com/swcurran) - 1.3.0rc0 [\#3604](https://github.com/openwallet-foundation/acapy/pull/3604) [swcurran](https://github.com/swcurran) - Dependabot PRs - - [Link to list of Dependabot PRs in this 
release](https://github.com/openwallet-foundation/acapy/pulls?q=is%3Apr+is%3Amerged+merged%3A2025-01-21..2025-04-03+author%3Aapp%2Fdependabot+) + - [Link to list of Dependabot PRs in this release](https://github.com/openwallet-foundation/acapy/pulls?q=is%3Apr+is%3Amerged+merged%3A2025-01-21..2025-04-28+author%3Aapp%2Fdependabot+) ## 1.2.4 diff --git a/Managing-ACA-Py-Doc-Site.md b/Managing-ACA-Py-Doc-Site.md index db30a8db1b..256fbd83f7 100644 --- a/Managing-ACA-Py-Doc-Site.md +++ b/Managing-ACA-Py-Doc-Site.md @@ -97,7 +97,7 @@ To delete the documentation version, do the following: - Check your `git status` and make sure there are no changes in the branch -- e.g., new files that shouldn't be added to the `gh-pages` branch. If there are any -- delete the files so they are not added. -- Remove the folder for the RC. For example `rm -rf 1.3.0rc1` +- Remove the folder for the RC. For example `rm -rf 1.3.0rc2` - Edit the `versions.json` file and remove the reference to the RC release in the file. - Push the changes via a PR to the ACA-Py `gh-pages` branch (don't PR them into diff --git a/PUBLISHING.md b/PUBLISHING.md index 1fced634b3..5768587ab0 100644 --- a/PUBLISHING.md +++ b/PUBLISHING.md @@ -6,7 +6,7 @@ a major, minor or patch release, per [semver](https://semver.org/) rules. Once ready to do a release, create a local branch that includes the following updates: -1. Create a local PR branch from an updated `main` branch, e.g. "1.3.0rc1". +1. Create a local PR branch from an updated `main` branch, e.g. "1.3.0rc2". 2. See if there are any Document Site `mkdocs` changes needed. Run the script `./scripts/prepmkdocs.sh; mkdocs`. Watch the log, noting particularly if @@ -140,7 +140,7 @@ Once you have the list of PRs: [publish-indy.yml]: https://github.com/openwallet-foundation/acapy/blob/main/.github/workflows/publish-indy.yml 12. 
When a new release is tagged, create a new branch at the same commit with - the branch name in the format `docs-v`, for example, `docs-v1.3.0rc1`. + the branch name in the format `docs-v`, for example, `docs-v1.3.0rc2`. The creation of the branch triggers the execution of the [publish-docs] GitHub Action which generates the documentation for the new release, publishing it at [https://aca-py.org]. The GitHub Action also executes when diff --git a/docs/deploying/AnoncredsControllerMigration.md b/docs/deploying/AnonCredsControllerMigration.md similarity index 100% rename from docs/deploying/AnoncredsControllerMigration.md rename to docs/deploying/AnonCredsControllerMigration.md diff --git a/docs/features/SupportedRFCs.md b/docs/features/SupportedRFCs.md index a7082dffb3..a3ee41f5b6 100644 --- a/docs/features/SupportedRFCs.md +++ b/docs/features/SupportedRFCs.md @@ -8,7 +8,7 @@ ACA-Py or the repository `main` branch. Reminders (and PRs!) to update this page welcome! If you have any questions, please contact us on the #aries channel on [OpenWallet Foundation Discord](https://discord.gg/openwallet-foundation) or through an issue in this repo. -**Last Update**: 2025-04-03, Release 1.3.0rc1 +**Last Update**: 2025-04-28, Release 1.3.0rc2 > The checklist version of this document was created as a joint effort > between [Northern Block](https://northernblock.io/), [Animo Solutions](https://animo.id/) and the Ontario government, on behalf of the Ontario government. 
diff --git a/mkdocs.yml b/mkdocs.yml index 0a6ea6126c..4cd479b9e7 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -138,7 +138,7 @@ nav: - Upgrading ACA-Py: deploying/UpgradingACA-Py.md - Enabling BBS Signatures support: deploying/BBSSignatures.md - Indy SDK to Askar Migration: deploying/IndySDKtoAskarMigration.md - - Controller Migration to use AnonCreds Rust: deploying/AnonCredsControllerMigration.md + - Controller Migration to use the askar-anoncreds Wallet Type: deploying/AnonCredsControllerMigration.md - The Use of Poetry in ACA-Py: deploying/Poetry.md - ACA-Py Container Images: deploying/ContainerImagesAndGithubActions.md - Databases: deploying/Databases.md diff --git a/open-api/openapi.json b/open-api/openapi.json index f14a618e8e..1d05a57bc7 100644 --- a/open-api/openapi.json +++ b/open-api/openapi.json @@ -2,7 +2,7 @@ "openapi" : "3.0.1", "info" : { "title" : "Aries Cloud Agent", - "version" : "v1.3.0rc1" + "version" : "v1.3.0rc2" }, "servers" : [ { "url" : "/" @@ -2037,7 +2037,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/CreateRequest" + "$ref" : "#/components/schemas/CreateDidIndyRequest" } } }, @@ -2048,7 +2048,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/CreateResponse" + "$ref" : "#/components/schemas/CreateDidIndyResponse" } } }, @@ -8562,42 +8562,7 @@ }, "type" : "object" }, - "CreateKeyRequest" : { - "properties" : { - "alg" : { - "description" : "Which key algorithm to use.", - "example" : "ed25519", - "type" : "string" - }, - "kid" : { - "description" : "Optional kid to bind to the keypair, such as a verificationMethod.", - "example" : "did:web:example.com#key-01", - "type" : "string" - }, - "seed" : { - "description" : "Optional seed to generate the key pair. 
Must enable insecure wallet mode.", - "example" : "00000000000000000000000000000000", - "type" : "string" - } - }, - "type" : "object" - }, - "CreateKeyResponse" : { - "properties" : { - "kid" : { - "description" : "The associated kid", - "example" : "did:web:example.com#key-01", - "type" : "string" - }, - "multikey" : { - "description" : "The Public Key Multibase format (multikey)", - "example" : "z6MkgKA7yrw5kYSiDuQFcye4bMaJpcfHFry3Bx45pdWh3s8i", - "type" : "string" - } - }, - "type" : "object" - }, - "CreateRequest" : { + "CreateDidIndyRequest" : { "properties" : { "features" : { "additionalProperties" : { @@ -8611,7 +8576,7 @@ "additionalProperties" : { "type" : "object" }, - "description" : "Additional configuration options", + "description" : "Additional configuration options. Supported options: did, seed, key_type. Default key_type is ed25519.", "example" : { "did" : "did:indy:WRfXPg8dantKVubE3HX8pw", "key_type" : "ed25519", @@ -8622,7 +8587,7 @@ }, "type" : "object" }, - "CreateResponse" : { + "CreateDidIndyResponse" : { "properties" : { "did" : { "description" : "DID created", @@ -8637,6 +8602,41 @@ }, "type" : "object" }, + "CreateKeyRequest" : { + "properties" : { + "alg" : { + "description" : "Which key algorithm to use.", + "example" : "ed25519", + "type" : "string" + }, + "kid" : { + "description" : "Optional kid to bind to the keypair, such as a verificationMethod.", + "example" : "did:web:example.com#key-01", + "type" : "string" + }, + "seed" : { + "description" : "Optional seed to generate the key pair. 
Must enable insecure wallet mode.", + "example" : "00000000000000000000000000000000", + "type" : "string" + } + }, + "type" : "object" + }, + "CreateKeyResponse" : { + "properties" : { + "kid" : { + "description" : "The associated kid", + "example" : "did:web:example.com#key-01", + "type" : "string" + }, + "multikey" : { + "description" : "The Public Key Multibase format (multikey)", + "example" : "z6MkgKA7yrw5kYSiDuQFcye4bMaJpcfHFry3Bx45pdWh3s8i", + "type" : "string" + } + }, + "type" : "object" + }, "CreateWalletRequest" : { "properties" : { "extra_settings" : { diff --git a/open-api/swagger.json b/open-api/swagger.json index 5c7422c890..4c9c1cf58c 100644 --- a/open-api/swagger.json +++ b/open-api/swagger.json @@ -1,7 +1,7 @@ { "swagger" : "2.0", "info" : { - "version" : "v1.3.0rc1", + "version" : "v1.3.0rc2", "title" : "Aries Cloud Agent" }, "tags" : [ { @@ -1705,14 +1705,14 @@ "name" : "body", "required" : false, "schema" : { - "$ref" : "#/definitions/CreateRequest" + "$ref" : "#/definitions/CreateDidIndyRequest" } } ], "responses" : { "200" : { "description" : "", "schema" : { - "$ref" : "#/definitions/CreateResponse" + "$ref" : "#/definitions/CreateDidIndyResponse" } } } @@ -7163,6 +7163,42 @@ } } }, + "CreateDidIndyRequest" : { + "type" : "object", + "properties" : { + "features" : { + "type" : "object", + "example" : "{}", + "description" : "Additional features to enable for the did.", + "additionalProperties" : { } + }, + "options" : { + "type" : "object", + "example" : { + "did" : "did:indy:WRfXPg8dantKVubE3HX8pw", + "key_type" : "ed25519", + "seed" : "000000000000000000000000Trustee1" + }, + "description" : "Additional configuration options. Supported options: did, seed, key_type. 
Default key_type is ed25519.", + "additionalProperties" : { } + } + } + }, + "CreateDidIndyResponse" : { + "type" : "object", + "properties" : { + "did" : { + "type" : "string", + "example" : "did:indy:DFZgMggBEXcZFVQ2ZBTwdr", + "description" : "DID created" + }, + "verkey" : { + "type" : "string", + "example" : "BnSWTUQmdYCewSGFrRUhT6LmKdcCcSzRGqWXMPnEP168", + "description" : "Verification key" + } + } + }, "CreateKeyRequest" : { "type" : "object", "properties" : { @@ -7198,42 +7234,6 @@ } } }, - "CreateRequest" : { - "type" : "object", - "properties" : { - "features" : { - "type" : "object", - "example" : "{}", - "description" : "Additional features to enable for the did.", - "additionalProperties" : { } - }, - "options" : { - "type" : "object", - "example" : { - "did" : "did:indy:WRfXPg8dantKVubE3HX8pw", - "key_type" : "ed25519", - "seed" : "000000000000000000000000Trustee1" - }, - "description" : "Additional configuration options", - "additionalProperties" : { } - } - } - }, - "CreateResponse" : { - "type" : "object", - "properties" : { - "did" : { - "type" : "string", - "example" : "did:indy:DFZgMggBEXcZFVQ2ZBTwdr", - "description" : "DID created" - }, - "verkey" : { - "type" : "string", - "example" : "BnSWTUQmdYCewSGFrRUhT6LmKdcCcSzRGqWXMPnEP168", - "description" : "Verification key" - } - } - }, "CreateWalletRequest" : { "type" : "object", "properties" : { diff --git a/pyproject.toml b/pyproject.toml index a4206de299..7d9090a45a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "acapy_agent" -version = "1.3.0rc1" +version = "1.3.0rc2" description = "(ACA-Py) A Cloud Agent Python is a foundation for building decentralized identity applications and services running in non-mobile environments. 
" authors = [] license = "Apache-2.0" From 670669b1939eb86af18b30f9a514e90b43bc1315 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Apr 2025 11:29:07 -0700 Subject: [PATCH 27/32] chore(deps): Bump untergeek/curator (#3685) Bumps untergeek/curator from 8.0.16 to 8.0.21. --- updated-dependencies: - dependency-name: untergeek/curator dependency-version: 8.0.21 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: jamshale <31809382+jamshale@users.noreply.github.com> Co-authored-by: Stephen Curran --- demo/elk-stack/extensions/curator/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/demo/elk-stack/extensions/curator/Dockerfile b/demo/elk-stack/extensions/curator/Dockerfile index 69215e440f..0d87adb376 100644 --- a/demo/elk-stack/extensions/curator/Dockerfile +++ b/demo/elk-stack/extensions/curator/Dockerfile @@ -1,4 +1,4 @@ -FROM untergeek/curator:8.0.16@sha256:8ab15516eb320bddb042c6da3c81b57e4e69a7aac04efc32190db979fe0bfb5b +FROM untergeek/curator:8.0.21@sha256:eca135f7f6e1781cfbca9e7cb3fa8237a481cfd815aa7ef52b38adb268ab6f97 USER root From 544fcb736854400249b470d00cbaac916cab69d6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Apr 2025 10:42:19 +0000 Subject: [PATCH 28/32] chore(deps): Bump actions/setup-python in the all-actions group (#3688) --- .github/workflows/format.yml | 2 +- .github/workflows/publish-docs.yml | 2 +- .github/workflows/pythonpublish.yml | 2 +- .github/workflows/scenario-integration-tests.yml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index f57ba0c7a4..c80d20fd8e 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -16,7 
+16,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 + - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 with: python-version: "3.12" - name: Ruff Format and Lint Check diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index 7268cc8df0..da765e54ac 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -19,7 +19,7 @@ jobs: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 0 # fetch all commits/branches - - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 + - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 with: python-version: 3.x - uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index 0663e4d8ed..64872d0bef 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -18,7 +18,7 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Set up Python - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 with: python-version: "3.x" - name: Install build and publish dependencies diff --git a/.github/workflows/scenario-integration-tests.yml b/.github/workflows/scenario-integration-tests.yml index 5008a3947d..76545be8c1 100644 --- a/.github/workflows/scenario-integration-tests.yml +++ b/.github/workflows/scenario-integration-tests.yml @@ -53,7 +53,7 @@ jobs: if: steps.check-if-scenarios-or-src-changed.outputs.run_tests != 'false' run: pipx install poetry id: setup-poetry - - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # 
v5.5.0 + - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 if: steps.check-if-scenarios-or-src-changed.outputs.run_tests != 'false' with: python-version: "3.12" From 7537fbb9ff3998ced98d80cae0e5288b78c43791 Mon Sep 17 00:00:00 2001 From: Mourits de Beer <31511766+ff137@users.noreply.github.com> Date: Wed, 30 Apr 2025 00:12:16 +0200 Subject: [PATCH 29/32] :construction_worker: Skip sonar-merge-main workflow if github actor is dependabot (#3691) This is because GitHub Actions does not provide repository secrets (such as SONAR_TOKEN) to workflows triggered by dependabot[bot] for security reasons. As a result, the SonarCloud scan step fails when dependabot merges to main (i.e., using `@dependabot squash and merge`, instead of manually merging oneself). By skipping the scan when the actor is dependabot, we avoid unnecessary workflow failures. Signed-off-by: ff137 --- .github/workflows/sonar-merge-main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/sonar-merge-main.yml b/.github/workflows/sonar-merge-main.yml index 3e1f97ccfd..76921880db 100644 --- a/.github/workflows/sonar-merge-main.yml +++ b/.github/workflows/sonar-merge-main.yml @@ -12,7 +12,7 @@ jobs: sonarcloud: name: SonarCloud runs-on: ubuntu-latest - if: github.repository == 'openwallet-foundation/acapy' + if: github.repository == 'openwallet-foundation/acapy' && github.actor != 'dependabot[bot]' steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: From 6a326df2f01eb27c8c00c807648949daca9620a4 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Tue, 29 Apr 2025 22:08:33 -0500 Subject: [PATCH 30/32] fix: multiuse invite derived conns should have msg id Signed-off-by: Daniel Bluhm --- .../protocols/didexchange/v1_0/manager.py | 1 + .../multiuse_invitations/docker-compose.yml | 91 +++++++++++++++++++ .../examples/multiuse_invitations/example.py | 34 +++++++ 3 files changed, 126 insertions(+) create mode 100644 
scenarios/examples/multiuse_invitations/docker-compose.yml create mode 100644 scenarios/examples/multiuse_invitations/example.py diff --git a/acapy_agent/protocols/didexchange/v1_0/manager.py b/acapy_agent/protocols/didexchange/v1_0/manager.py index 39d1aaa637..42c2876fd4 100644 --- a/acapy_agent/protocols/didexchange/v1_0/manager.py +++ b/acapy_agent/protocols/didexchange/v1_0/manager.py @@ -756,6 +756,7 @@ async def _derive_new_conn_from_multiuse_invitation( """ new_conn_rec = ConnRecord( invitation_key=conn_rec.invitation_key, + invitation_msg_id=conn_rec.invitation_msg_id, state=ConnRecord.State.INIT.rfc160, accept=conn_rec.accept, their_role=conn_rec.their_role, diff --git a/scenarios/examples/multiuse_invitations/docker-compose.yml b/scenarios/examples/multiuse_invitations/docker-compose.yml new file mode 100644 index 0000000000..7bc9e0852a --- /dev/null +++ b/scenarios/examples/multiuse_invitations/docker-compose.yml @@ -0,0 +1,91 @@ + services: + alice: + image: acapy-test + ports: + - "3001:3001" + environment: + RUST_LOG: 'aries-askar::log::target=error' + command: > + start + --label Alice + --inbound-transport http 0.0.0.0 3000 + --outbound-transport http + --endpoint http://alice:3000 + --admin 0.0.0.0 3001 + --admin-insecure-mode + --tails-server-base-url http://tails:6543 + --genesis-url http://test.bcovrin.vonx.io/genesis + --wallet-type askar + --wallet-name alice + --wallet-key insecure + --auto-provision + --log-level debug + --debug-webhooks + healthcheck: + test: curl -s -o /dev/null -w '%{http_code}' "http://localhost:3001/status/live" | grep "200" > /dev/null + start_period: 30s + interval: 7s + timeout: 5s + retries: 5 + depends_on: + tails: + condition: service_started + + bob: + image: acapy-test + ports: + - "3002:3001" + environment: + RUST_LOG: 'aries-askar::log::target=error' + command: > + start + --label Bob + --inbound-transport http 0.0.0.0 3000 + --outbound-transport http + --endpoint http://bob:3000 + --admin 0.0.0.0 3001 + 
--admin-insecure-mode + --tails-server-base-url http://tails:6543 + --genesis-url http://test.bcovrin.vonx.io/genesis + --wallet-type askar + --wallet-name bob + --wallet-key insecure + --auto-provision + --log-level debug + --debug-webhooks + --monitor-revocation-notification + healthcheck: + test: curl -s -o /dev/null -w '%{http_code}' "http://localhost:3001/status/live" | grep "200" > /dev/null + start_period: 30s + interval: 7s + timeout: 5s + retries: 5 + + tails: + image: ghcr.io/bcgov/tails-server:latest + ports: + - 6543:6543 + environment: + - GENESIS_URL=http://test.bcovrin.vonx.io/genesis + command: > + tails-server + --host 0.0.0.0 + --port 6543 + --storage-path /tmp/tails-files + --log-level INFO + + example: + container_name: controller + build: + context: ../.. + environment: + - ALICE=http://alice:3001 + - BOB=http://bob:3001 + volumes: + - ./example.py:/usr/src/app/example.py:ro,z + command: python -m example + depends_on: + alice: + condition: service_healthy + bob: + condition: service_healthy diff --git a/scenarios/examples/multiuse_invitations/example.py b/scenarios/examples/multiuse_invitations/example.py new file mode 100644 index 0000000000..40a25536ae --- /dev/null +++ b/scenarios/examples/multiuse_invitations/example.py @@ -0,0 +1,34 @@ +"""Minimal reproducible example script. + +This script is for you to use to reproduce a bug or demonstrate a feature. 
+""" + +import asyncio +from os import getenv + +from acapy_controller import Controller +from acapy_controller.logging import logging_to_stdout, section +from acapy_controller.protocols import didexchange, oob_invitation + +ALICE = getenv("ALICE", "http://alice:3001") +BOB = getenv("BOB", "http://bob:3001") + + +async def main(): + """Test Controller protocols.""" + async with Controller(base_url=ALICE) as alice, Controller(base_url=BOB) as bob: + invite = await oob_invitation(alice, multi_use=True) + with section("first"): + a1, _ = await didexchange(alice, bob, invite=invite) + a1 = a1.serialize() + assert a1["invitation_msg_id"] + with section("second"): + a2, _ = await didexchange(alice, bob, invite=invite) + a2 = a2.serialize() + assert a2["invitation_msg_id"] + assert a1["invitation_msg_id"] == a2["invitation_msg_id"] + + +if __name__ == "__main__": + logging_to_stdout() + asyncio.run(main()) From e3dcabf9a2f5d989aeaea592eafd1f4cc76b3f62 Mon Sep 17 00:00:00 2001 From: Mourits de Beer <31511766+ff137@users.noreply.github.com> Date: Wed, 30 Apr 2025 18:00:58 +0200 Subject: [PATCH 31/32] :bug: Fix reading expected key in TAA (#3693) * :art: Fix type hints Signed-off-by: ff137 * :bug: Handle TAA response missing expected key Signed-off-by: ff137 --------- Signed-off-by: ff137 Co-authored-by: Stephen Curran --- acapy_agent/config/ledger.py | 6 +++++- acapy_agent/ledger/base.py | 6 +++--- acapy_agent/ledger/indy_vdr.py | 2 +- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/acapy_agent/config/ledger.py b/acapy_agent/config/ledger.py index 563ea2e638..b0b183d68e 100644 --- a/acapy_agent/config/ledger.py +++ b/acapy_agent/config/ledger.py @@ -158,7 +158,11 @@ async def ledger_config( if taa_info["taa_required"] and public_did: LOGGER.debug("TAA acceptance required") taa_accepted = await ledger.get_latest_txn_author_acceptance() - digest_match = taa_info["taa_record"]["digest"] == taa_accepted["digest"] + + taa_record_digest = 
taa_info["taa_record"]["digest"] # keys exist + taa_accepted_digest = taa_accepted.get("digest") # key might not exist + + digest_match = taa_record_digest == taa_accepted_digest if not taa_accepted or not digest_match: LOGGER.info("TAA acceptance needed - performing acceptance") if not await accept_taa(ledger, profile, taa_info, provision): diff --git a/acapy_agent/ledger/base.py b/acapy_agent/ledger/base.py index 0b671eb50c..d0c67f056d 100644 --- a/acapy_agent/ledger/base.py +++ b/acapy_agent/ledger/base.py @@ -179,11 +179,11 @@ async def get_wallet_public_did(self) -> DIDInfo: """Fetch the public DID from the wallet.""" @abstractmethod - async def get_txn_author_agreement(self, reload: bool = False): + async def get_txn_author_agreement(self, reload: bool = False) -> dict: """Get the current transaction author agreement, fetching it if necessary.""" @abstractmethod - async def fetch_txn_author_agreement(self): + async def fetch_txn_author_agreement(self) -> dict: """Fetch the current AML and TAA from the ledger.""" @abstractmethod @@ -193,7 +193,7 @@ async def accept_txn_author_agreement( """Save a new record recording the acceptance of the TAA.""" @abstractmethod - async def get_latest_txn_author_acceptance(self): + async def get_latest_txn_author_acceptance(self) -> dict: """Look up the latest TAA acceptance.""" def taa_digest(self, version: str, text: str): diff --git a/acapy_agent/ledger/indy_vdr.py b/acapy_agent/ledger/indy_vdr.py index 445f94fa39..9763c492a1 100644 --- a/acapy_agent/ledger/indy_vdr.py +++ b/acapy_agent/ledger/indy_vdr.py @@ -101,7 +101,7 @@ def __init__( self.genesis_hash_cache: Optional[str] = None self.genesis_txns_cache = genesis_transactions self.init_config = bool(genesis_transactions) - self.taa_cache: Optional[str] = None + self.taa_cache: Optional[dict] = None self.read_only: bool = read_only self.socks_proxy: str = socks_proxy From f87ad89b28c1c3819bc6384ad494f1c76d188973 Mon Sep 17 00:00:00 2001 From: George Mulhearn 
<57472912+gmulhearn@users.noreply.github.com> Date: Thu, 1 May 2025 11:14:30 +1000 Subject: [PATCH 32/32] (fix) W3C LDP Fixes for alternative VMs (#3641) --- .../v2_0/formats/ld_proof/handler.py | 17 +- .../v2_0/formats/ld_proof/tests/fixtures.py | 11 +- .../formats/ld_proof/tests/test_handler.py | 25 --- .../cryptosuites/eddsa_jcs_2022.py | 2 +- acapy_agent/vc/vc_ld/manager.py | 76 ++------ acapy_agent/vc/vc_ld/tests/test_manager.py | 91 ++-------- .../default_verification_key_strategy.py | 164 +++++++++++++----- acapy_agent/wallet/keys/manager.py | 88 +++++++--- .../test_default_verification_key_strategy.py | 104 ++++++++++- 9 files changed, 335 insertions(+), 243 deletions(-) diff --git a/acapy_agent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py b/acapy_agent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py index 04e7761d49..c1320f33de 100644 --- a/acapy_agent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py +++ b/acapy_agent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py @@ -7,6 +7,11 @@ from pyld import jsonld from pyld.jsonld import JsonLdProcessor +from acapy_agent.wallet.default_verification_key_strategy import ( + BaseVerificationKeyStrategy, + VerificationKeyStrategyError, +) + from ......messaging.decorators.attach_decorator import AttachDecorator from ......storage.vc_holder.base import VCHolder from ......storage.vc_holder.vc_record import VCRecord @@ -185,10 +190,16 @@ async def create_offer( # Make sure we can issue with the did and proof type try: - await manager.assert_can_issue_with_id_and_proof_type( - detail.credential.issuer_id, detail.options.proof_type + # Check suitable verification method for signing (fails if none suitable) + verkey_id_strategy = self.profile.context.inject(BaseVerificationKeyStrategy) + _ = await verkey_id_strategy.get_verification_method_id_for_did( + detail.credential.issuer_id, + self.profile, + proof_type=detail.options.proof_type, + proof_purpose="assertionMethod", + 
verification_method_id=detail.options.verification_method, ) - except VcLdpManagerError as err: + except VerificationKeyStrategyError as err: raise V20CredFormatError( "Checking whether issuance is possible failed" ) from err diff --git a/acapy_agent/protocols/issue_credential/v2_0/formats/ld_proof/tests/fixtures.py b/acapy_agent/protocols/issue_credential/v2_0/formats/ld_proof/tests/fixtures.py index 02c6205092..1ecde70c2c 100644 --- a/acapy_agent/protocols/issue_credential/v2_0/formats/ld_proof/tests/fixtures.py +++ b/acapy_agent/protocols/issue_credential/v2_0/formats/ld_proof/tests/fixtures.py @@ -1,5 +1,6 @@ TEST_DID_SOV = "did:sov:LjgpST2rjsoxYegQDRm7EL" -TEST_DID_KEY = "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" +TEST_DID_KEY_ED25519 = "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" +TEST_DID_KEY_BLS_G2 = "did:key:zUC74E9UD2W6Q1MgPexCEdpstiCsY1Vbnyqepygk7McZVce38L1tGX7qZ2SgY4Zz2m9FUB4Xb5cEHSujks9XeKDzqe4QzW3CyyJ1cv8iBLNqU61EfkBoW2yEkg6VgqHTDtANYRS" LD_PROOF_VC_DETAIL = { "credential": { @@ -10,7 +11,7 @@ "type": ["VerifiableCredential", "UniversityDegreeCredential"], "credentialSubject": {"test": "key"}, "issuanceDate": "2021-04-12", - "issuer": TEST_DID_KEY, + "issuer": TEST_DID_KEY_ED25519, }, "options": { "proofType": "Ed25519Signature2018", @@ -26,7 +27,7 @@ "type": ["VerifiableCredential", "UniversityDegreeCredential"], "credentialSubject": {"test": "key"}, "issuanceDate": "2021-04-12", - "issuer": TEST_DID_KEY, + "issuer": TEST_DID_KEY_BLS_G2, }, "options": { "proofType": "BbsBlsSignature2020", @@ -42,7 +43,7 @@ "type": ["VerifiableCredential", "UniversityDegreeCredential"], "credentialSubject": {"test": "key"}, "issuanceDate": "2021-04-12", - "issuer": TEST_DID_KEY, + "issuer": TEST_DID_KEY_ED25519, }, "options": { "proofType": "Ed25519Signature2020", @@ -57,7 +58,7 @@ "type": ["VerifiableCredential", "UniversityDegreeCredential"], "credentialSubject": {"test": "key"}, "issuanceDate": "2021-04-12", - "issuer": TEST_DID_KEY, + 
"issuer": TEST_DID_KEY_ED25519, "proof": { "proofPurpose": "assertionMethod", "created": "2019-12-11T03:50:55", diff --git a/acapy_agent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py b/acapy_agent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py index a534c266b2..7c7c80f3b7 100644 --- a/acapy_agent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py +++ b/acapy_agent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py @@ -192,22 +192,12 @@ async def test_receive_proposal(self): async def test_create_offer(self): with ( - mock.patch.object( - VcLdpManager, - "assert_can_issue_with_id_and_proof_type", - mock.CoroutineMock(), - ) as mock_can_issue, patch.object(test_module, "get_properties_without_context", return_value=[]), ): (cred_format, attachment) = await self.handler.create_offer( self.cred_proposal ) - mock_can_issue.assert_called_once_with( - LD_PROOF_VC_DETAIL["credential"]["issuer"], - LD_PROOF_VC_DETAIL["options"]["proofType"], - ) - # assert identifier match assert cred_format.attach_id == self.handler.format.api == attachment.ident @@ -233,11 +223,6 @@ async def test_create_offer_adds_bbs_context(self): ) with ( - mock.patch.object( - VcLdpManager, - "assert_can_issue_with_id_and_proof_type", - mock.CoroutineMock(), - ), patch.object(test_module, "get_properties_without_context", return_value=[]), ): (cred_format, attachment) = await self.handler.create_offer(cred_proposal) @@ -261,11 +246,6 @@ async def test_create_offer_adds_ed25519_2020_context(self): ) with ( - mock.patch.object( - VcLdpManager, - "assert_can_issue_with_id_and_proof_type", - mock.CoroutineMock(), - ), patch.object(test_module, "get_properties_without_context", return_value=[]), ): (cred_format, attachment) = await self.handler.create_offer(cred_proposal) @@ -286,11 +266,6 @@ async def test_create_offer_x_no_proposal(self): async def test_create_offer_x_wrong_attributes(self): missing_properties = 
["foo"] with ( - mock.patch.object( - self.manager, - "assert_can_issue_with_id_and_proof_type", - mock.CoroutineMock(), - ), patch.object( test_module, "get_properties_without_context", diff --git a/acapy_agent/vc/data_integrity/cryptosuites/eddsa_jcs_2022.py b/acapy_agent/vc/data_integrity/cryptosuites/eddsa_jcs_2022.py index dd6bcaf460..15c37bd88b 100644 --- a/acapy_agent/vc/data_integrity/cryptosuites/eddsa_jcs_2022.py +++ b/acapy_agent/vc/data_integrity/cryptosuites/eddsa_jcs_2022.py @@ -194,7 +194,7 @@ async def proof_verification( """ multikey = await MultikeyManager( self.session - ).resolve_multikey_from_verification_method(options.verification_method) + ).resolve_multikey_from_verification_method_id(options.verification_method) verkey = multikey_to_verkey(multikey) key_type = key_type_from_multikey(multikey) return await self.wallet.verify_message( diff --git a/acapy_agent/vc/vc_ld/manager.py b/acapy_agent/vc/vc_ld/manager.py index 46ea6fad36..a1f5cbe377 100644 --- a/acapy_agent/vc/vc_ld/manager.py +++ b/acapy_agent/vc/vc_ld/manager.py @@ -3,6 +3,7 @@ from datetime import datetime, timezone from typing import Dict, List, Optional, Type, Union, cast +from acapy_agent.wallet.keys.manager import MultikeyManager, multikey_to_verkey from pyld import jsonld from pyld.jsonld import JsonLdProcessor @@ -16,7 +17,6 @@ from ...wallet.base import BaseWallet from ...wallet.default_verification_key_strategy import BaseVerificationKeyStrategy from ...wallet.did_info import DIDInfo -from ...wallet.error import WalletNotFoundError from ...wallet.key_type import BLS12381G2, ED25519, P256, KeyType from ..ld_proofs.constants import ( CREDENTIALS_CONTEXT_V1_URL, @@ -133,60 +133,6 @@ async def _did_info_for_did(self, did: str) -> DIDInfo: # All other methods we can just query return await wallet.get_local_did(did) - async def assert_can_issue_with_id_and_proof_type( - self, issuer_id: Optional[str], proof_type: Optional[str] - ): - """Assert that it is possible to issue using 
the specified id and proof type. - - Args: - issuer_id (str): The issuer id - proof_type (str): the signature suite proof type - - Raises: - VcLdpManagerError: - - If the proof type is not supported - - If the issuer id is not a did - - If the did is not found in th wallet - - If the did does not support to create signatures for the proof type - - """ - if not issuer_id or not proof_type: - raise VcLdpManagerError( - "Issuer id and proof type are required to issue a credential." - ) - - try: - # Check if it is a proof type we can issue with - if proof_type not in PROOF_TYPE_SIGNATURE_SUITE_MAPPING.keys(): - raise VcLdpManagerError( - f"Unable to sign credential with unsupported proof type {proof_type}." - f" Supported proof types: {PROOF_TYPE_SIGNATURE_SUITE_MAPPING.keys()}" - ) - - if not issuer_id.startswith("did:"): - raise VcLdpManagerError( - f"Unable to issue credential with issuer id: {issuer_id}." - " Only issuance with DIDs is supported" - ) - - # Retrieve did from wallet. Will throw if not found - did = await self._did_info_for_did(issuer_id) - - # Raise error if we cannot issue a credential with this proof type - # using this DID from - did_proof_types = KEY_TYPE_SIGNATURE_TYPE_MAPPING[did.key_type] - if proof_type not in did_proof_types: - raise VcLdpManagerError( - f"Unable to issue credential with issuer id {issuer_id} and proof " - f"type {proof_type}. DID only supports proof types {did_proof_types}" - ) - - except WalletNotFoundError: - raise VcLdpManagerError( - f"Issuer did {issuer_id} not found." - " Unable to issue credential with this DID." - ) - async def _get_suite( self, *, @@ -210,6 +156,11 @@ async def _get_suite( "using external provider." 
) from error + async with self.profile.session() as session: + key_manager = MultikeyManager(session) + key_info = await key_manager.resolve_and_bind_kid(verification_method) + multikey = key_info["multikey"] + # Get signature class based on proof type SignatureClass = PROOF_TYPE_SIGNATURE_SUITE_MAPPING[proof_type] @@ -220,7 +171,7 @@ async def _get_suite( key_pair=WalletKeyPair( profile=self.profile, key_type=SIGNATURE_SUITE_KEY_TYPE_MAPPING[SignatureClass], - public_key_base58=did_info.verkey if did_info else None, + public_key_base58=multikey_to_verkey(multikey), ), ) @@ -360,9 +311,6 @@ async def _get_suite_for_document( if not proof_type: raise VcLdpManagerError("Proof type is required") - # Assert we can issue the credential based on issuer + proof_type - await self.assert_can_issue_with_id_and_proof_type(issuer_id, proof_type) - # Create base proof object with options proof = LDProof( created=options.created, @@ -371,20 +319,22 @@ async def _get_suite_for_document( ) did_info = await self._did_info_for_did(issuer_id) + + # Determine/check suitable verification method for signing. 
fails if none suitable verkey_id_strategy = self.profile.context.inject(BaseVerificationKeyStrategy) - verification_method = ( - options.verification_method - or await verkey_id_strategy.get_verification_method_id_for_did( + verification_method_id = ( + await verkey_id_strategy.get_verification_method_id_for_did( issuer_id, self.profile, proof_type=proof_type, proof_purpose="assertionMethod", + verification_method_id=options.verification_method, ) ) suite = await self._get_suite( proof_type=proof_type, - verification_method=verification_method, + verification_method=verification_method_id, proof=proof.serialize(), did_info=did_info, ) diff --git a/acapy_agent/vc/vc_ld/tests/test_manager.py b/acapy_agent/vc/vc_ld/tests/test_manager.py index 381c4265ec..472fd01c63 100644 --- a/acapy_agent/vc/vc_ld/tests/test_manager.py +++ b/acapy_agent/vc/vc_ld/tests/test_manager.py @@ -5,6 +5,7 @@ import pytest from acapy_agent.tests import mock +from acapy_agent.wallet.keys.manager import MultikeyManager from ....did.did_key import DIDKey from ....resolver.default.key import KeyDIDResolver @@ -16,9 +17,7 @@ BaseVerificationKeyStrategy, DefaultVerificationKeyStrategy, ) -from ....wallet.did_info import DIDInfo from ....wallet.did_method import KEY, SOV, DIDMethod, DIDMethods -from ....wallet.error import WalletNotFoundError from ....wallet.key_type import BLS12381G2, ED25519, KeyTypes from ...ld_proofs.constants import ( SECURITY_CONTEXT_BBS_URL, @@ -39,6 +38,9 @@ TEST_DID_SOV = "did:sov:LjgpST2rjsoxYegQDRm7EL" TEST_DID_KEY = "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" +TEST_DID_KEY_SEED = "testseed000000000000000000000001" +TEST_DID_KEY_VM = "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL#z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" +TEST_DID_KEY_VERKEY = "3Dn1SJNPaCXcvvJvSbsFWP2xaCjMom3can8CQNhWrTRx" TEST_UUID = "urn:uuid:dc86e95c-dc85-4f91-b563-82657d095c44" VC = { "credential": { @@ -85,63 +87,10 @@ async def asyncSetUp(self) -> None: self.vc = 
VerifiableCredential.deserialize(VC["credential"]) self.options = LDProofVCOptions.deserialize(VC["options"]) - async def test_assert_can_issue_with_id_and_proof_type(self): - with pytest.raises(VcLdpManagerError) as context: - await self.manager.assert_can_issue_with_id_and_proof_type( - "issuer_id", "random_proof_type" - ) - - assert ( - "Unable to sign credential with unsupported proof type random_proof_type" - in str(context.value) - ) - - with pytest.raises(VcLdpManagerError) as context: - await self.manager.assert_can_issue_with_id_and_proof_type( - "not_did", Ed25519Signature2018.signature_type - ) - assert "Unable to issue credential with issuer id: not_did" in str(context.value) - - with mock.patch.object( - self.manager, - "_did_info_for_did", - mock.CoroutineMock(), - ) as mock_did_info: - did_info = DIDInfo( - did=TEST_DID_SOV, - verkey="verkey", - metadata={}, - method=SOV, - key_type=ED25519, - ) - mock_did_info.return_value = did_info - await self.manager.assert_can_issue_with_id_and_proof_type( - "did:key:found", Ed25519Signature2018.signature_type - ) - await self.manager.assert_can_issue_with_id_and_proof_type( - "did:key:found", Ed25519Signature2020.signature_type - ) - - invalid_did_info = DIDInfo( - did=TEST_DID_SOV, - verkey="verkey", - metadata={}, - method=SOV, - key_type=BLS12381G2, + async with self.profile.session() as session: + await MultikeyManager(session=session).create( + seed=TEST_DID_KEY_SEED, alg="ed25519" ) - mock_did_info.return_value = invalid_did_info - with pytest.raises(VcLdpManagerError) as context: - await self.manager.assert_can_issue_with_id_and_proof_type( - "did:key:found", Ed25519Signature2018.signature_type - ) - assert "Unable to issue credential with issuer id" in str(context.value) - - mock_did_info.side_effect = (WalletNotFoundError,) - with pytest.raises(VcLdpManagerError) as context: - await self.manager.assert_can_issue_with_id_and_proof_type( - "did:key:notfound", Ed25519Signature2018.signature_type - ) - 
assert "Issuer did did:key:notfound not found" in str(context.value) methods: list[DIDMethod] = [SOV, KEY] @@ -158,11 +107,6 @@ async def test_get_did_info_for_did_sov(self): async def test_get_suite_for_document(self): with ( - mock.patch.object( - self.manager, - "assert_can_issue_with_id_and_proof_type", - mock.CoroutineMock(), - ) as mock_can_issue, mock.patch.object( self.manager, "_did_info_for_did", @@ -177,9 +121,8 @@ async def test_get_suite_for_document(self): assert suite.proof == {"created": VC["options"]["created"]} assert isinstance(suite.key_pair, WalletKeyPair) assert suite.key_pair.key_type == ED25519 - assert suite.key_pair.public_key_base58 == mock_did_info.return_value.verkey + assert suite.key_pair.public_key_base58 == TEST_DID_KEY_VERKEY - mock_can_issue.assert_called() mock_did_info.assert_awaited_once_with(self.vc.issuer) async def test_get_suite(self): @@ -188,45 +131,45 @@ async def test_get_suite(self): suite = await self.manager._get_suite( proof_type=BbsBlsSignature2020.signature_type, - verification_method="verification_method", + verification_method=TEST_DID_KEY_VM, proof=proof, did_info=did_info, ) assert isinstance(suite, BbsBlsSignature2020) - assert suite.verification_method == "verification_method" + assert suite.verification_method == TEST_DID_KEY_VM assert suite.proof == proof assert isinstance(suite.key_pair, WalletKeyPair) assert suite.key_pair.key_type == BLS12381G2 - assert suite.key_pair.public_key_base58 == did_info.verkey + assert suite.key_pair.public_key_base58 == TEST_DID_KEY_VERKEY suite = await self.manager._get_suite( proof_type=Ed25519Signature2018.signature_type, - verification_method="verification_method", + verification_method=TEST_DID_KEY_VM, proof=proof, did_info=did_info, ) assert isinstance(suite, Ed25519Signature2018) - assert suite.verification_method == "verification_method" + assert suite.verification_method == TEST_DID_KEY_VM assert suite.proof == proof assert isinstance(suite.key_pair, WalletKeyPair) 
assert suite.key_pair.key_type == ED25519 - assert suite.key_pair.public_key_base58 == did_info.verkey + assert suite.key_pair.public_key_base58 == TEST_DID_KEY_VERKEY suite = await self.manager._get_suite( proof_type=Ed25519Signature2020.signature_type, - verification_method="verification_method", + verification_method=TEST_DID_KEY_VM, proof=proof, did_info=did_info, ) assert isinstance(suite, Ed25519Signature2020) - assert suite.verification_method == "verification_method" + assert suite.verification_method == TEST_DID_KEY_VM assert suite.proof == proof assert isinstance(suite.key_pair, WalletKeyPair) assert suite.key_pair.key_type == ED25519 - assert suite.key_pair.public_key_base58 == did_info.verkey + assert suite.key_pair.public_key_base58 == TEST_DID_KEY_VERKEY async def test_get_proof_purpose(self): purpose = self.manager._get_proof_purpose() diff --git a/acapy_agent/wallet/default_verification_key_strategy.py b/acapy_agent/wallet/default_verification_key_strategy.py index f408b8f926..26b3c0bb58 100644 --- a/acapy_agent/wallet/default_verification_key_strategy.py +++ b/acapy_agent/wallet/default_verification_key_strategy.py @@ -6,6 +6,13 @@ from pydid import DIDDocument, VerificationMethod +from acapy_agent.wallet.key_type import BLS12381G2, ED25519, P256 +from acapy_agent.wallet.keys.manager import ( + MultikeyManager, + key_type_from_multikey, + multikey_from_verification_method, +) + from ..core.error import BaseError from ..core.profile import Profile from ..did.did_key import DIDKey @@ -43,32 +50,39 @@ async def get_verification_method_id_for_did( *, proof_type: Optional[str] = None, proof_purpose: Optional[ProofPurposeStr] = None, + verification_method_id: Optional[str] = None, ) -> str: - """Given a DID, returns the verification key ID in use. + """Find suitable VerificationMethod. - Returns None if no strategy is specified for this DID. 
+ Given a DID and other verification method requirements, + find and return the first suitable verification method ID. + Throws if no suitable verification method :params did: the did :params profile: context of the call - :params allowed_verification_method_types: list of accepted key types + :params proof_type: the JSON-LD proof type which the verification method + should be able to produce. :params proof_purpose: the verkey relationship (assertionMethod, keyAgreement, ..) - :returns Optional[str]: the current verkey ID + :params verification_method_id: the verification method ID which must match. + :returns str: the first suitable verification method """ ... class DefaultVerificationKeyStrategy(BaseVerificationKeyStrategy): - """A basic implementation for verkey strategy. - - Supports did:key: and did:sov only. - """ + """A basic implementation for verkey strategy.""" def __init__(self): - """Initialize the key types mapping.""" + """Initialize the key types mapping. + + Map of LDP signature suite (proofType) to suitable key types + """ self.key_types_mapping = { - "Ed25519Signature2018": ["Ed25519VerificationKey2018"], - "Ed25519Signature2020": ["Ed25519VerificationKey2020", "Multikey"], - "BbsBlsSignature2020": ["Bls12381G2Key2020"], + "Ed25519Signature2018": [ED25519], + "Ed25519Signature2020": [ED25519], + "EcdsaSecp256r1Signature2019": [P256], + "BbsBlsSignature2020": [BLS12381G2], + "BbsBlsSignatureProof2020": [BLS12381G2], } async def get_verification_method_id_for_did( @@ -78,57 +92,119 @@ async def get_verification_method_id_for_did( *, proof_type: Optional[str] = None, proof_purpose: Optional[ProofPurposeStr] = None, + verification_method_id: Optional[str] = None, ) -> str: - """Given a did:key or did:sov, returns the verification key ID in use. - - Returns None if no strategy is specified for this DID. 
- - :params did: the did - :params profile: context of the call - :params allowed_verification_method_types: list of accepted key types - :params proof_purpose: the verkey relationship (assertionMethod, keyAgreement, ..) - :returns Optional[str]: the current verkey ID - """ - proof_type = proof_type or "Ed25519Signature2018" + """Find suitable VerificationMethod.""" proof_purpose = proof_purpose or "assertionMethod" if proof_purpose not in PROOF_PURPOSES: raise ValueError("Invalid proof purpose") + if proof_type is not None: + suitable_key_types = self.key_types_mapping.get(proof_type) + else: + # any key is suitable if no proof type requirement set + suitable_key_types = list( + {val for values in self.key_types_mapping.values() for val in values} + ) + if not suitable_key_types: + raise VerificationKeyStrategyError( + f"proof type {proof_type} is not supported" + ) + + # handle default hardcoded cases if did.startswith("did:key:"): - return DIDKey.from_did(did).key_id + didkey = DIDKey.from_did(did) + vm_id = didkey.key_id + if didkey.key_type not in suitable_key_types: + raise VerificationKeyStrategyError( + f"DID {did} has wrong key type for proof type {proof_type}" + ) + if verification_method_id is not None and vm_id != verification_method_id: + raise VerificationKeyStrategyError( + f"Verification method ID {verification_method_id} \ + cannot be used with DID {did}" + ) + return vm_id elif did.startswith("did:sov:"): # key-1 is what uniresolver uses for key id - return did + "#key-1" + vm_id = did + "#key-1" + if ED25519 not in suitable_key_types: + raise VerificationKeyStrategyError( + "did:sov only capable of ED25519 based proof types" + ) + if verification_method_id is not None and vm_id != verification_method_id: + raise VerificationKeyStrategyError( + f"Verification method ID {verification_method_id} \ + cannot be used with DID {did}" + ) + return vm_id + + # else, handle generically for any DID + + # TODO - if the local representation of the DID 
contains all this information, + # DID resolution cost could be avoided. However, for now there is not adequate + # information locally to determine if a DID/VM is suitable. + + # shortcut path: if a VM ID is specified, fetch it with multikey and perform + # basic checks of VM suitability. + # NOTE: this skips the proofPurpose check, as that is not currently possible + # without resolving the DID (expensive) + if verification_method_id is not None: + async with profile.session() as session: + key_manager = MultikeyManager(session=session) + key_info = await key_manager.resolve_and_bind_kid(verification_method_id) + key_type = key_type_from_multikey(multikey=key_info["multikey"]) + if key_type not in suitable_key_types: + raise VerificationKeyStrategyError( + f"VerificationMethod {verification_method_id} has wrong key type \ + for proof type {proof_type}" + ) + return verification_method_id resolver = profile.inject(DIDResolver) doc_raw = await resolver.resolve(profile=profile, did=did) doc = DIDDocument.deserialize(doc_raw) + # get verification methods for the proof purpose methods_or_refs = doc_raw.get(proof_purpose, []) - # Dereference any refs in the verification relationship - methods = [ - await resolver.dereference_verification_method(profile, method, document=doc) - if isinstance(method, str) - else VerificationMethod.deserialize(method) - for method in methods_or_refs - ] - - method_types = self.key_types_mapping.get(proof_type) - if not method_types: - raise VerificationKeyStrategyError( - f"proof type {proof_type} is not supported" - ) - # Filter methods by type expected for proof_type - methods = [vm for vm in methods if vm.type in method_types] - if not methods: + # apply various filters to determine set of suitable verification methods + suitable_methods = [] + async with profile.session() as session: + key_manager = MultikeyManager(session=session) + for method_or_ref in methods_or_refs: + # Dereference any refs in the verification relationship + if 
isinstance(method_or_ref, str): + vm_id = await resolver.dereference_verification_method( + profile, method_or_ref, document=doc + ) + else: + vm_id = VerificationMethod.deserialize(method_or_ref) + + vm_multikey = multikey_from_verification_method(vm_id) + + # filter methods by key type expected for proof_type + vm_key_type = key_type_from_multikey(vm_multikey) + if vm_key_type not in suitable_key_types: + continue + + # filter methods for keys actually owned by the wallet + if not await key_manager.multikey_exists( + multikey_from_verification_method(vm_id) + ): + continue + + # survived all filters + suitable_methods.append(vm_id) + + if not suitable_methods: raise VerificationKeyStrategyError( f"No matching verification method found for did {did} with proof " f"type {proof_type} and purpose {proof_purpose}" ) - if len(methods) > 1: + if len(suitable_methods) > 1: LOGGER.info( ( "More than 1 verification method matched for did %s with proof " @@ -137,7 +213,7 @@ async def get_verification_method_id_for_did( did, proof_type, proof_purpose, - methods[0].id, + suitable_methods[0].id, ) - return methods[0].id + return suitable_methods[0].id diff --git a/acapy_agent/wallet/keys/manager.py b/acapy_agent/wallet/keys/manager.py index a2bab470b8..ef8908bf70 100644 --- a/acapy_agent/wallet/keys/manager.py +++ b/acapy_agent/wallet/keys/manager.py @@ -1,5 +1,6 @@ """Multikey class.""" +import logging from ...core.profile import ProfileSession from ...resolver.did_resolver import DIDResolver from ...utils.multiformats import multibase @@ -7,6 +8,9 @@ from ..base import BaseWallet from ..key_type import BLS12381G2, ED25519, P256, KeyType from ..util import b58_to_bytes, bytes_to_b58 +from pydid import VerificationMethod + +LOGGER = logging.getLogger(__name__) DEFAULT_ALG = "ed25519" ALG_MAPPINGS = { @@ -59,6 +63,31 @@ def key_type_from_multikey(multikey: str) -> KeyType: raise MultikeyManagerError(f"Unsupported key algorithm for multikey {multikey}.") +def 
multikey_from_verification_method(verification_method: VerificationMethod) -> str: + """Derive a multikey from a VerificationMethod.""" + if verification_method.type == "Multikey": + multikey = verification_method.public_key_multibase + + elif verification_method.type == "Ed25519VerificationKey2018": + multikey = verkey_to_multikey( + verification_method.public_key_base58, alg="ed25519" + ) + + elif verification_method.type == "Ed25519VerificationKey2020": + multikey = verification_method.public_key_multibase + + elif verification_method.type == "Bls12381G2Key2020": + multikey = verkey_to_multikey( + verification_method.public_key_base58, alg="bls12381g2" + ) + # TODO address JsonWebKey based verification methods + + else: + raise MultikeyManagerError("Unknown verification method type.") + + return multikey + + class MultikeyManagerError(Exception): """Generic MultikeyManager Error.""" @@ -72,35 +101,32 @@ def __init__(self, session: ProfileSession): self.session: ProfileSession = session self.wallet: BaseWallet = session.inject(BaseWallet) - async def resolve_multikey_from_verification_method(self, kid: str): - """Derive a multikey from the verification method.""" + async def resolve_and_bind_kid(self, kid: str): + """Fetch key if exists, otherwise resolve and bind it. + + This function is idempotent. + """ + if await self.kid_exists(kid): + LOGGER.debug(f"kid {kid} already bound in storage, will not resolve.") + return await self.from_kid(kid) + else: + multikey = await self.resolve_multikey_from_verification_method_id(kid) + LOGGER.debug( + f"kid {kid} binding not found in storage, \ + binding to resolved multikey {multikey}." 
+ ) + return await self.update(multikey, kid) + + async def resolve_multikey_from_verification_method_id(self, kid: str): + """Derive a multikey from the verification method ID.""" resolver = self.session.inject(DIDResolver) verification_method = await resolver.dereference( profile=self.session.profile, did_url=kid ) - if verification_method.type == "Multikey": - multikey = verification_method.public_key_multibase - - elif verification_method.type == "Ed25519VerificationKey2018": - multikey = verkey_to_multikey( - verification_method.public_key_base58, alg="ed25519" - ) - - elif verification_method.type == "Ed25519VerificationKey2020": - multikey = verification_method.public_key_multibase + return multikey_from_verification_method(verification_method) - elif verification_method.type == "Bls12381G2Key2020": - multikey = verkey_to_multikey( - verification_method.public_key_base58, alg="bls12381g2" - ) - - else: - raise MultikeyManagerError("Unknown verification method type.") - - return multikey - - def key_type_from_multikey(self, multikey: str): + def key_type_from_multikey(self, multikey: str) -> KeyType: """Derive key_type class from multikey prefix.""" for mapping in ALG_MAPPINGS: if multikey.startswith(ALG_MAPPINGS[mapping]["multikey_prefix"]): @@ -116,6 +142,22 @@ async def kid_exists(self, kid: str): if key: return True + return False + + except (WalletNotFoundError, AttributeError): + return False + + async def multikey_exists(self, multikey: str): + """Check if a multikey exists in the wallet.""" + + try: + key_info = await self.wallet.get_signing_key( + verkey=multikey_to_verkey(multikey) + ) + + if key_info: + return True + return False except (WalletNotFoundError, AttributeError): return False diff --git a/acapy_agent/wallet/tests/test_default_verification_key_strategy.py b/acapy_agent/wallet/tests/test_default_verification_key_strategy.py index f5f9d75e01..c94cc29af8 100644 --- a/acapy_agent/wallet/tests/test_default_verification_key_strategy.py +++ 
b/acapy_agent/wallet/tests/test_default_verification_key_strategy.py @@ -3,6 +3,8 @@ import pytest from acapy_agent.resolver.did_resolver import DIDResolver +from acapy_agent.wallet.key_type import KeyTypes +from acapy_agent.wallet.keys.manager import MultikeyManager from ...did.did_key import DIDKey from ...resolver.tests.test_did_resolver import MockResolver @@ -10,7 +12,12 @@ from ...wallet.default_verification_key_strategy import DefaultVerificationKeyStrategy TEST_DID_SOV = "did:sov:LjgpST2rjsoxYegQDRm7EL" -TEST_DID_KEY = "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" +TEST_SEED = "testseed000000000000000000000001" +TEST_ED25519_MULTIKEY = "z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" +TEST_ED25519_VERKEY = "3Dn1SJNPaCXcvvJvSbsFWP2xaCjMom3can8CQNhWrTRx" +TEST_BLS_G2_MULTIKEY = "zUC74E9UD2W6Q1MgPexCEdpstiCsY1Vbnyqepygk7McZVce38L1tGX7qZ2SgY4Zz2m9FUB4Xb5cEHSujks9XeKDzqe4QzW3CyyJ1cv8iBLNqU61EfkBoW2yEkg6VgqHTDtANYRS" +TEST_BLS_G2_VERKEY = "pPbb9Lqs3PVTyiHM4h8fbQqxHjBPm1Hixb6vdW9kkjHEij4FZrigkaV1P5DjWTbcKxeeYfkQuZMmozRQV3tH1gXhCA972LAXMGSKH7jxz8sNJqrCR6o8asgXDeYZeL1W3p8" +TEST_DID_KEY = f"did:key:{TEST_ED25519_MULTIKEY}" class TestDefaultVerificationKeyStrategy(IsolatedAsyncioTestCase): @@ -27,40 +34,54 @@ async def asyncSetUp(self) -> None: ], "id": "did:example:123", "verificationMethod": [ + # VM has a key not owned by this acapy agent + { + "id": "did:example:123#not-owned", + "type": "Multikey", + "controller": "did:example:123", + "publicKeyMultibase": "z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDHHyGo38EefXmgDL", + }, { "id": "did:example:123#key-1", "type": "Multikey", "controller": "did:example:123", - "publicKeyMultibase": "z6MkjYXizfaAXTriV3h2Vc9uxJ9AMQpfG7mE1WKMnn1KJvFE", + "publicKeyMultibase": TEST_ED25519_MULTIKEY, }, { "id": "did:example:123#key-2", "type": "Multikey", "controller": "did:example:123", - "publicKeyMultibase": "z6MkjYXizfaAXTriV3h2Vc9uxJ9AMQpfG7mE1WKMnn1KJvFE", + "publicKeyMultibase": TEST_ED25519_MULTIKEY, }, { "id": 
"did:example:123#key-3", "type": "Ed25519VerificationKey2018", "controller": "did:example:123", - "publicKeyBase58": "66GgQRKjBvNFNYrKp3C57CbAXqYorEWsKVQRxW3JPhTr", + "publicKeyBase58": TEST_ED25519_VERKEY, }, ], "authentication": ["did:example:123#key-1"], "assertionMethod": [ + "did:example:123#not-owned", "did:example:123#key-2", "did:example:123#key-3", { "id": "did:example:123#key-4", "type": "Bls12381G2Key2020", "controller": "did:example:123", - "publicKeyBase58": "25EEkQtcLKsEzQ6JTo9cg4W7NHpaurn4Wg6LaNPFq6JQXnrP91SDviUz7KrJVMJd76CtAZFsRLYzvgX2JGxo2ccUHtuHk7ELCWwrkBDfrXCFVfqJKDootee9iVaF6NpdJtBE", + "publicKeyBase58": TEST_BLS_G2_VERKEY, }, ], }, ) ) self.profile.context.injector.bind_instance(DIDResolver, resolver) + self.profile.context.injector.bind_instance(KeyTypes, KeyTypes()) + async with self.profile.session() as session: + await MultikeyManager(session=session).create(seed=TEST_SEED, alg="ed25519") + await MultikeyManager(session=session).create( + seed=TEST_SEED, alg="bls12381g2" + ) async def test_with_did_sov(self): strategy = DefaultVerificationKeyStrategy() @@ -68,6 +89,18 @@ async def test_with_did_sov(self): await strategy.get_verification_method_id_for_did(TEST_DID_SOV, self.profile) == TEST_DID_SOV + "#key-1" ) + with pytest.raises(Exception): + await strategy.get_verification_method_id_for_did( + did=TEST_DID_SOV, + profile=self.profile, + proof_type="BbsBlsSignature2020", + ) + with pytest.raises(Exception): + await strategy.get_verification_method_id_for_did( + did=TEST_DID_SOV, + profile=self.profile, + verification_method_id=f"{TEST_DID_KEY}#key-2", + ) async def test_with_did_key(self): strategy = DefaultVerificationKeyStrategy() @@ -75,6 +108,18 @@ async def test_with_did_key(self): await strategy.get_verification_method_id_for_did(TEST_DID_KEY, self.profile) == DIDKey.from_did(TEST_DID_KEY).key_id ) + with pytest.raises(Exception): + await strategy.get_verification_method_id_for_did( + did=TEST_DID_KEY, + profile=self.profile, + 
proof_type="BbsBlsSignature2020", + ) + with pytest.raises(Exception): + await strategy.get_verification_method_id_for_did( + did=TEST_DID_KEY, + profile=self.profile, + verification_method_id=f"{TEST_DID_KEY}#abc", + ) async def test_with_did_for_assertion(self): strategy = DefaultVerificationKeyStrategy() @@ -94,6 +139,16 @@ async def test_with_did_for_assertion(self): proof_type="Ed25519Signature2018", proof_purpose="assertionMethod", ) + == "did:example:123#key-2" + ) + assert ( + await strategy.get_verification_method_id_for_did( + "did:example:123", + self.profile, + proof_type="Ed25519Signature2018", + proof_purpose="assertionMethod", + verification_method_id="did:example:123#key-3", + ) == "did:example:123#key-3" ) assert ( @@ -106,6 +161,45 @@ async def test_with_did_for_assertion(self): == "did:example:123#key-4" ) + async def test_fail_cases(self): + strategy = DefaultVerificationKeyStrategy() + # base case + assert ( + await strategy.get_verification_method_id_for_did( + "did:example:123", + self.profile, + proof_type="Ed25519Signature2020", + proof_purpose="assertionMethod", + verification_method_id="did:example:123#key-2", + ) + == "did:example:123#key-2" + ) + with pytest.raises(Exception): + # nothing suitable for purpose + await strategy.get_verification_method_id_for_did( + "did:example:123", + self.profile, + proof_type="Ed25519Signature2020", + proof_purpose="capabilityInvocation", + ) + with pytest.raises(Exception): + # nothing suitable for proof type + await strategy.get_verification_method_id_for_did( + "did:example:123", + self.profile, + proof_type="EcdsaSecp256r1Signature2019", + proof_purpose="assertionMethod", + ) + with pytest.raises(Exception): + # suitable, but key not owned by acapy + await strategy.get_verification_method_id_for_did( + "did:example:123", + self.profile, + proof_type="Ed25519Signature2020", + proof_purpose="assertionMethod", + verification_method_id="did:example:123#not-owned", + ) + async def 
test_unsupported_did_method(self): strategy = DefaultVerificationKeyStrategy() with pytest.raises(Exception):