From bed6e111d68486c9419c7ddb98225a93090ed9a0 Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 12:18:11 +0200 Subject: [PATCH 01/69] :recycle: Replace deprecated `pkg_resources` with `importlib` Signed-off-by: ff137 --- aries_cloudagent/config/logging.py | 7 +++++-- aries_cloudagent/utils/classloader.py | 22 ++++++++++++++-------- 2 files changed, 19 insertions(+), 10 deletions(-) diff --git a/aries_cloudagent/config/logging.py b/aries_cloudagent/config/logging.py index fa1a0db419..2e3661559e 100644 --- a/aries_cloudagent/config/logging.py +++ b/aries_cloudagent/config/logging.py @@ -8,7 +8,7 @@ import sys import yaml import time as mod_time -import pkg_resources +from importlib import resources from contextvars import ContextVar from datetime import datetime, timedelta @@ -69,9 +69,12 @@ def load_resource(path: str, encoding: str = None) -> TextIO: components = path.rsplit(":", 1) try: if len(components) == 1: + # Local filesystem resource return open(components[0], encoding=encoding) else: - bstream = pkg_resources.resource_stream(components[0], components[1]) + # Package resource + package, resource = components + bstream = resources.open_binary(package, resource) if encoding: return io.TextIOWrapper(bstream, encoding=encoding) return bstream diff --git a/aries_cloudagent/utils/classloader.py b/aries_cloudagent/utils/classloader.py index b2a24e62a3..f1644814ba 100644 --- a/aries_cloudagent/utils/classloader.py +++ b/aries_cloudagent/utils/classloader.py @@ -1,7 +1,8 @@ """The classloader provides utilties to dynamically load classes and modules.""" import inspect -import pkg_resources +from importlib import resources +from pathlib import Path import sys from importlib import import_module @@ -158,20 +159,25 @@ def load_subclass_of(cls, base_class: Type, mod_path: str, package: str = None): @classmethod def scan_subpackages(cls, package: str) -> Sequence[str]: """Return a list of sub-packages defined under a named package.""" - # FIXME use importlib.resources in python 3.7 if "." in package: package, sub_pkg = package.split(".", 1) else: sub_pkg = "." - if not pkg_resources.resource_isdir(package, sub_pkg): + + try: + package_path = resources.files(package) + except FileNotFoundError: raise ModuleLoadError(f"Undefined package {package}") + + if not (package_path / sub_pkg).is_dir(): + raise ModuleLoadError(f"Undefined package {package}") + found = [] joiner = "" if sub_pkg == "." else f"{sub_pkg}." 
- for sub_path in pkg_resources.resource_listdir(package, sub_pkg): - if pkg_resources.resource_exists( - package, f"{sub_pkg}/{sub_path}/__init__.py" - ): - found.append(f"{package}.{joiner}{sub_path}") + sub_path = package_path / sub_pkg + for item in sub_path.iterdir(): + if (item / "__init__.py").exists(): + found.append(f"{package}.{joiner}{item.name}") return found From 3bc373557013a4251ea8a634ed6433fbd137ecb1 Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 12:34:23 +0200 Subject: [PATCH 02/69] :recycle: Refactor tests to accommodate for pkg_resources being replaced with importlib Signed-off-by: ff137 --- aries_cloudagent/config/tests/test_logging.py | 26 ++++++++++++------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/aries_cloudagent/config/tests/test_logging.py b/aries_cloudagent/config/tests/test_logging.py index 7d8d220a0c..f1c08cc3b7 100644 --- a/aries_cloudagent/config/tests/test_logging.py +++ b/aries_cloudagent/config/tests/test_logging.py @@ -136,19 +136,27 @@ def test_banner_did(self): assert test_did in output def test_load_resource(self): + # Testing local file access with mock.patch("builtins.open", mock.MagicMock()) as mock_open: test_module.load_resource("abc", encoding="utf-8") + mock_open.assert_called_once() # Verify if open was called correctly mock_open.side_effect = IOError("insufficient privilege") - test_module.load_resource("abc", encoding="utf-8") - - with mock.patch.object( - test_module.pkg_resources, "resource_stream", mock.MagicMock() - ) as mock_res_stream, mock.patch.object( - test_module.io, "TextIOWrapper", mock.MagicMock() + with self.assertRaises(IOError): + test_module.load_resource("abc", encoding="utf-8") + + # Testing package resource access with encoding (text mode) + with mock.patch( + "importlib.resources.open_binary", mock.MagicMock() + ) as mock_open_binary, mock.patch( + "io.TextIOWrapper", mock.MagicMock() ) as mock_text_io_wrapper: test_module.load_resource("abc:def", encoding="utf-8") + mock_open_binary.assert_called_once_with("abc", "def") + mock_text_io_wrapper.assert_called_once() - with mock.patch.object( - test_module.pkg_resources, "resource_stream", mock.MagicMock() - ) as mock_res_stream: + # Testing package resource access without encoding (binary mode) + with mock.patch( + "importlib.resources.open_binary", mock.MagicMock() + ) as mock_open_binary: test_module.load_resource("abc:def", encoding=None) + mock_open_binary.assert_called_once_with("abc", "def") From 3f05059147d1a0f8de040cda78deabf01f8bddca Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 12:35:39 +0200 Subject: [PATCH 03/69] :art: remove unused import Signed-off-by: ff137 --- aries_cloudagent/utils/classloader.py | 1 - 1 file changed, 1 deletion(-) diff --git a/aries_cloudagent/utils/classloader.py b/aries_cloudagent/utils/classloader.py index f1644814ba..12a4d6fe01 100644 --- a/aries_cloudagent/utils/classloader.py +++ b/aries_cloudagent/utils/classloader.py @@ -2,7 +2,6 @@ import inspect from importlib import resources -from pathlib import Path import sys from importlib import import_module From 52e671b20633ad238d9fca0bfdad828cecd1a056 Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 12:59:22 +0200 Subject: [PATCH 04/69] :art: remove the return type Signed-off-by: ff137 --- aries_cloudagent/config/logging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aries_cloudagent/config/logging.py b/aries_cloudagent/config/logging.py index 2e3661559e..760173e835 100644 --- 
a/aries_cloudagent/config/logging.py +++ b/aries_cloudagent/config/logging.py @@ -58,7 +58,7 @@ def filter(self, record): return True -def load_resource(path: str, encoding: str = None) -> TextIO: +def load_resource(path: str, encoding: str = None): """Open a resource file located in a python package or the local filesystem. Args: From 3f74fad59a4813299e7fc61b9cf96ce3712df3e0 Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 12:59:40 +0200 Subject: [PATCH 05/69] :art: fix test Signed-off-by: ff137 --- aries_cloudagent/config/tests/test_logging.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/aries_cloudagent/config/tests/test_logging.py b/aries_cloudagent/config/tests/test_logging.py index f1c08cc3b7..8ae25bad2b 100644 --- a/aries_cloudagent/config/tests/test_logging.py +++ b/aries_cloudagent/config/tests/test_logging.py @@ -139,10 +139,9 @@ def test_load_resource(self): # Testing local file access with mock.patch("builtins.open", mock.MagicMock()) as mock_open: test_module.load_resource("abc", encoding="utf-8") - mock_open.assert_called_once() # Verify if open was called correctly mock_open.side_effect = IOError("insufficient privilege") - with self.assertRaises(IOError): - test_module.load_resource("abc", encoding="utf-8") + # load_resource should absorb IOError + test_module.load_resource("abc", encoding="utf-8") # Testing package resource access with encoding (text mode) with mock.patch( From fc9894d6a621fd574f0530f6285773b194576015 Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 13:08:45 +0200 Subject: [PATCH 06/69] :art: Signed-off-by: ff137 --- aries_cloudagent/config/logging.py | 1 - 1 file changed, 1 deletion(-) diff --git a/aries_cloudagent/config/logging.py b/aries_cloudagent/config/logging.py index 760173e835..149d5456b5 100644 --- a/aries_cloudagent/config/logging.py +++ b/aries_cloudagent/config/logging.py @@ -21,7 +21,6 @@ ) from logging.handlers import BaseRotatingHandler from random import randint -from typing import TextIO from portalocker import LOCK_EX, lock, unlock from pythonjsonlogger import jsonlogger From af87e1eec25e6a1c1cf5c751d80fb926d2817d1e Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 15:39:45 +0200 Subject: [PATCH 07/69] :art: Fix Marshmallow deprecation warnings by introducing metadata field Signed-off-by: ff137 --- .../anoncreds/models/anoncreds_cred_def.py | 75 ++++++++++++------- .../anoncreds/models/anoncreds_revocation.py | 59 ++++++++++----- .../anoncreds/models/anoncreds_schema.py | 28 ++++--- aries_cloudagent/anoncreds/routes.py | 28 ++++--- .../protocols/didexchange/v1_0/routes.py | 6 +- .../models/issuer_cred_rev_record.py | 6 +- 6 files changed, 133 insertions(+), 69 deletions(-) diff --git a/aries_cloudagent/anoncreds/models/anoncreds_cred_def.py b/aries_cloudagent/anoncreds/models/anoncreds_cred_def.py index b9e038ea9d..0630114bcc 100644 --- a/aries_cloudagent/anoncreds/models/anoncreds_cred_def.py +++ b/aries_cloudagent/anoncreds/models/anoncreds_cred_def.py @@ -16,7 +16,10 @@ NUM_STR_WHOLE_VALIDATE, ) -NUM_STR_WHOLE = {"validate": NUM_STR_WHOLE_VALIDATE, "example": NUM_STR_WHOLE_EXAMPLE} +NUM_STR_WHOLE = { + "validate": NUM_STR_WHOLE_VALIDATE, + "metadata": {"example": NUM_STR_WHOLE_EXAMPLE}, +} class CredDefValuePrimary(BaseModel): @@ -126,17 +129,27 @@ class Meta: model_class = CredDefValueRevocation unknown = EXCLUDE - g = fields.Str(example="1 1F14F&ECB578F 2 095E45DDF417D") - g_dash = fields.Str(example="1 1D64716fCDC00C 1 0C781960FA66E3D3 2 095E45DDF417D") - h = 
fields.Str(example="1 16675DAE54BFAE8 2 095E45DD417D") - h0 = fields.Str(example="1 21E5EF9476EAF18 2 095E45DDF417D") - h1 = fields.Str(example="1 236D1D99236090 2 095E45DDF417D") - h2 = fields.Str(example="1 1C3AE8D1F1E277 2 095E45DDF417D") - htilde = fields.Str(example="1 1D8549E8C0F8 2 095E45DDF417D") - h_cap = fields.Str(example="1 1B2A32CF3167 1 2490FEBF6EE55 1 0000000000000000") - u = fields.Str(example="1 0C430AAB2B4710 1 1CB3A0932EE7E 1 0000000000000000") - pk = fields.Str(example="1 142CD5E5A7DC 1 153885BD903312 2 095E45DDF417D") - y = fields.Str(example="1 153558BD903312 2 095E45DDF417D 1 0000000000000000") + g = fields.Str(metadata={"example": "1 1F14F&ECB578F 2 095E45DDF417D"}) + g_dash = fields.Str( + metadata={"example": "1 1D64716fCDC00C 1 0C781960FA66E3D3 2 095E45DDF417D"} + ) + h = fields.Str(metadata={"example": "1 16675DAE54BFAE8 2 095E45DD417D"}) + h0 = fields.Str(metadata={"example": "1 21E5EF9476EAF18 2 095E45DDF417D"}) + h1 = fields.Str(metadata={"example": "1 236D1D99236090 2 095E45DDF417D"}) + h2 = fields.Str(metadata={"example": "1 1C3AE8D1F1E277 2 095E45DDF417D"}) + htilde = fields.Str(metadata={"example": "1 1D8549E8C0F8 2 095E45DDF417D"}) + h_cap = fields.Str( + metadata={"example": "1 1B2A32CF3167 1 2490FEBF6EE55 1 0000000000000000"} + ) + u = fields.Str( + metadata={"example": "1 0C430AAB2B4710 1 1CB3A0932EE7E 1 0000000000000000"} + ) + pk = fields.Str( + metadata={"example": "1 142CD5E5A7DC 1 153885BD903312 2 095E45DDF417D"} + ) + y = fields.Str( + metadata={"example": "1 153558BD903312 2 095E45DDF417D 1 0000000000000000"} + ) class CredDefValue(BaseModel): @@ -178,11 +191,11 @@ class Meta: primary = fields.Nested( CredDefValuePrimarySchema(), - description="Primary value for credential definition", + metadata={"description": "Primary value for credential definition"}, ) revocation = fields.Nested( CredDefValueRevocationSchema(), - description="Revocation value for credential definition", + metadata={"description": "Revocation value for credential definition"}, required=False, ) @@ -243,20 +256,26 @@ class Meta: unknown = EXCLUDE issuer_id = fields.Str( - description="Issuer Identifier of the credential definition or schema", + metadata={ + "description": "Issuer Identifier of the credential definition or schema", + "example": INDY_OR_KEY_DID_EXAMPLE, + }, data_key="issuerId", - example=INDY_OR_KEY_DID_EXAMPLE, ) schema_id = fields.Str( data_key="schemaId", - description="Schema identifier", - example=INDY_SCHEMA_ID_EXAMPLE, + metadata={ + "description": "Schema identifier", + "example": INDY_SCHEMA_ID_EXAMPLE, + }, ) type = fields.Str(validate=OneOf(["CL"])) tag = fields.Str( - description="""The tag value passed in by the Issuer to + metadata={ + "description": """The tag value passed in by the Issuer to an AnonCred's Credential Definition create and store implementation.""", - example="default", + "example": "default", + } ) value = fields.Nested(CredDefValueSchema()) @@ -315,12 +334,14 @@ class Meta: ) ) credential_definition_id = fields.Str( - description="credential definition id", + metadata={ + "description": "credential definition id", + "example": INDY_CRED_DEF_ID_EXAMPLE, + }, allow_none=True, - example=INDY_CRED_DEF_ID_EXAMPLE, ) credential_definition = fields.Nested( - CredDefSchema(), description="credential definition" + CredDefSchema(), metadata={"description": "credential definition"} ) @@ -418,11 +439,13 @@ class Meta: unknown = EXCLUDE credential_definition_id = fields.Str( - description="credential definition id", - 
example=INDY_CRED_DEF_ID_EXAMPLE, + metadata={ + "description": "credential definition id", + "example": INDY_CRED_DEF_ID_EXAMPLE, + }, ) credential_definition = fields.Nested( - CredDefSchema(), description="credential definition" + CredDefSchema(), metadata={"description": "credential definition"} ) resolution_metadata = fields.Dict() credential_definitions_metadata = fields.Dict() diff --git a/aries_cloudagent/anoncreds/models/anoncreds_revocation.py b/aries_cloudagent/anoncreds/models/anoncreds_revocation.py index b60eefafa2..3a257749c4 100644 --- a/aries_cloudagent/anoncreds/models/anoncreds_revocation.py +++ b/aries_cloudagent/anoncreds/models/anoncreds_revocation.py @@ -130,18 +130,25 @@ class Meta: unknown = EXCLUDE issuer_id = fields.Str( - description="Issuer Identifier of the credential definition or schema", + metadata={ + "description": "Issuer Identifier of the credential definition or schema", + "example": INDY_OR_KEY_DID_EXAMPLE, + }, data_key="issuerId", - example=INDY_OR_KEY_DID_EXAMPLE, ) type = fields.Str(data_key="revocDefType") cred_def_id = fields.Str( - description="Credential definition identifier", + metadata={ + "description": "Credential definition identifier", + "example": INDY_CRED_DEF_ID_EXAMPLE, + }, data_key="credDefId", - example=INDY_CRED_DEF_ID_EXAMPLE, ) tag = fields.Str( - description="tag for the revocation registry definition", example="default" + metadata={ + "description": "tag for the revocation registry definition", + "example": "default", + } ) value = fields.Nested(RevRegDefValueSchema()) @@ -204,11 +211,13 @@ class Meta: ) ) revocation_registry_definition_id = fields.Str( - description="revocation registry definition id", - example=INDY_REV_REG_ID_EXAMPLE, + metadata={ + "description": "revocation registry definition id", + "example": INDY_REV_REG_ID_EXAMPLE, + } ) revocation_registry_definition = fields.Nested( - RevRegDefSchema(), description="revocation registry definition" + RevRegDefSchema(), metadata={"description": "revocation registry definition"} ) @@ -381,30 +390,40 @@ class Meta: unknown = EXCLUDE issuer_id = fields.Str( - description="Issuer Identifier of the credential definition or schema", + metadata={ + "description": "Issuer Identifier of the credential definition or schema", + "example": INDY_OR_KEY_DID_EXAMPLE, + }, data_key="issuerId", - example=INDY_OR_KEY_DID_EXAMPLE, ) rev_reg_def_id = fields.Str( - description="The ID of the revocation registry definition", + metadata={ + "description": "The ID of the revocation registry definition", + "example": INDY_REV_REG_ID_EXAMPLE, + }, data_key="revRegDefId", - example=INDY_REV_REG_ID_EXAMPLE, ) revocation_list = fields.List( fields.Int(), - description="Bit list representing revoked credentials", + metadata={ + "description": "Bit list representing revoked credentials", + "example": [0, 1, 1, 0], + }, data_key="revocationList", - example=[0, 1, 1, 0], ) current_accumulator = fields.Str( - description="The current accumalator value", - example="21 118...1FB", + metadata={ + "description": "The current accumalator value", + "example": "21 118...1FB", + }, data_key="currentAccumulator", ) timestamp = fields.Int( - description="Timestamp at which revocation list is applicable", + metadata={ + "description": "Timestamp at which revocation list is applicable", + "example": INDY_ISO8601_DATETIME_EXAMPLE, + }, required=False, - example=INDY_ISO8601_DATETIME_EXAMPLE, ) @@ -458,7 +477,9 @@ class Meta: ] ) ) - revocation_list = fields.Nested(RevListSchema(), description="revocation list") 
+ revocation_list = fields.Nested( + RevListSchema(), metadata={"description": "revocation list"} + ) class RevListResult(BaseModel): diff --git a/aries_cloudagent/anoncreds/models/anoncreds_schema.py b/aries_cloudagent/anoncreds/models/anoncreds_schema.py index 58cb4a06f2..c9190239fa 100644 --- a/aries_cloudagent/anoncreds/models/anoncreds_schema.py +++ b/aries_cloudagent/anoncreds/models/anoncreds_schema.py @@ -61,20 +61,26 @@ class Meta: unknown = EXCLUDE issuer_id = fields.Str( - description="Issuer Identifier of the credential definition or schema", + metadata={ + "description": "Issuer Identifier of the credential definition or schema", + "example": INDY_OR_KEY_DID_EXAMPLE, + }, data_key="issuerId", - example=INDY_OR_KEY_DID_EXAMPLE, ) attr_names = fields.List( fields.Str( - description="Attribute name", - example="score", + metadata={ + "description": "Attribute name", + "example": "score", + } ), - description="Schema attribute names", + metadata={"description": "Schema attribute names"}, data_key="attrNames", ) - name = fields.Str(description="Schema name", example="Example schema") - version = fields.Str(description="Schema version", example="1.0") + name = fields.Str( + metadata={"description": "Schema name", "example": "Example schema"} + ) + version = fields.Str(metadata={"description": "Schema version", "example": "1.0"}) class GetSchemaResult(BaseModel): @@ -130,7 +136,7 @@ class Meta: schema_value = fields.Nested(AnonCredsSchemaSchema(), data_key="schema") schema_id = fields.Str( - description="Schema identifier", example=INDY_SCHEMA_ID_EXAMPLE + metadata={"description": "Schema identifier", "example": INDY_SCHEMA_ID_EXAMPLE} ) resolution_metadata = fields.Dict() schema_metadata = fields.Dict() @@ -184,8 +190,10 @@ class Meta: ) ) schema_id = fields.Str( - description="Schema identifier", - example=INDY_SCHEMA_ID_EXAMPLE, + metadata={ + "description": "Schema identifier", + "example": INDY_SCHEMA_ID_EXAMPLE, + } ) schema_value = fields.Nested(AnonCredsSchemaSchema(), data_key="schema") diff --git a/aries_cloudagent/anoncreds/routes.py b/aries_cloudagent/anoncreds/routes.py index 2931c18807..1edc0607a3 100644 --- a/aries_cloudagent/anoncreds/routes.py +++ b/aries_cloudagent/anoncreds/routes.py @@ -447,8 +447,10 @@ class GetCredDefsResponseSchema(OpenAPISchema): credential_definition_ids = fields.List( fields.Str( - description="credential definition identifiers", - example="GvLGiRogTJubmj5B36qhYz:3:CL:8:faber.agent.degree_schema", + metadata={ + "description": "credential definition identifiers", + "example": "GvLGiRogTJubmj5B36qhYz:3:CL:8:faber.agent.degree_schema", + } ) ) @@ -482,20 +484,28 @@ class InnerRevRegDefSchema(OpenAPISchema): """Request schema for revocation registry creation request.""" issuer_id = fields.Str( - description="Issuer Identifier of the credential definition or schema", + metadata={ + "description": "Issuer Identifier of the credential definition or schema", + "example": INDY_OR_KEY_DID_EXAMPLE, + }, data_key="issuerId", - example=INDY_OR_KEY_DID_EXAMPLE, ) cred_def_id = fields.Str( - description="Credential definition identifier", + metadata={ + "description": "Credential definition identifier", + "example": INDY_SCHEMA_ID_EXAMPLE, + }, data_key="credDefId", - example=INDY_SCHEMA_ID_EXAMPLE, ) - tag = fields.Str(description="tag for revocation registry", example="default") + tag = fields.Str( + metadata={"description": "tag for revocation registry", "example": "default"} + ) max_cred_num = fields.Int( - description="Maximum number of credential 
revocations per registry", + metadata={ + "description": "Maximum number of credential revocations per registry", + "example": 777, + }, data_key="maxCredNum", - example=666, ) diff --git a/aries_cloudagent/protocols/didexchange/v1_0/routes.py b/aries_cloudagent/protocols/didexchange/v1_0/routes.py index 0c7b90cd7c..4c3197bd57 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/routes.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/routes.py @@ -185,9 +185,11 @@ class DIDXRejectRequestSchema(OpenAPISchema): """Parameters and validators for reject-request request string.""" reason = fields.Str( - description="Reason for rejecting the DID Exchange", + metadata={ + "description": "Reason for rejecting the DID Exchange", + "example": "Request rejected", + }, required=False, - example="Request rejected", ) diff --git a/aries_cloudagent/revocation_anoncreds/models/issuer_cred_rev_record.py b/aries_cloudagent/revocation_anoncreds/models/issuer_cred_rev_record.py index 49a68b9209..bab3909b83 100644 --- a/aries_cloudagent/revocation_anoncreds/models/issuer_cred_rev_record.py +++ b/aries_cloudagent/revocation_anoncreds/models/issuer_cred_rev_record.py @@ -154,15 +154,15 @@ class Meta: ) rev_reg_id = fields.Str( required=False, - description="Revocation registry identifier", + metadata={"description": "Revocation registry identifier"}, ) cred_def_id = fields.Str( required=False, - description="Credential definition identifier", + metadata={"description": "Credential definition identifier"}, ) cred_rev_id = fields.Str( required=False, - description="Credential revocation identifier", + metadata={"description": "Credential revocation identifier"}, ) cred_ex_version = fields.Str( required=False, metadata={"description": "Credential exchange version"} From b6ff481071554ac22cf970721be51ce2dc0c4455 Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 16:02:18 +0200 Subject: [PATCH 08/69] :arrow_up: Upgrade `jsonpath-ng` Signed-off-by: ff137 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index d206d36eb1..c168a103fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,7 @@ base58="~2.1.0" ConfigArgParse="~1.5.3" deepmerge="~0.3.0" ecdsa="~0.16.1" -jsonpath_ng="1.5.2" +jsonpath-ng="1.6.1" Markdown="~3.1.1" markupsafe="2.0.1" marshmallow="~3.20.1" From 1a49a0e2592f9fbd2ba2ba948a48b5abb9d45dae Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 16:05:27 +0200 Subject: [PATCH 09/69] Update lock file Signed-off-by: ff137 --- poetry.lock | 27 +++++++-------------------- 1 file changed, 7 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8081a9129a..0de5538f30 100644 --- a/poetry.lock +++ b/poetry.lock @@ -790,17 +790,6 @@ toolz = ">=0.8.0" [package.extras] cython = ["cython"] -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - [[package]] name = "deepmerge" version = "0.3.0" @@ -1203,33 +1192,31 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonpath-ng" -version = "1.5.2" +version = "1.6.1" description = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and 
providing clear AST for metaprogramming." optional = false python-versions = "*" files = [ - {file = "jsonpath-ng-1.5.2.tar.gz", hash = "sha256:144d91379be14d9019f51973bd647719c877bfc07dc6f3f5068895765950c69d"}, - {file = "jsonpath_ng-1.5.2-py3-none-any.whl", hash = "sha256:93d1f248be68e485eb6635c3a01b2d681f296dc349d71e37c8755837b8944d36"}, + {file = "jsonpath-ng-1.6.1.tar.gz", hash = "sha256:086c37ba4917304850bd837aeab806670224d3f038fe2833ff593a672ef0a5fa"}, + {file = "jsonpath_ng-1.6.1-py3-none-any.whl", hash = "sha256:8f22cd8273d7772eea9aaa84d922e0841aa36fdb8a2c6b7f6c3791a16a9bc0be"}, ] [package.dependencies] -decorator = "*" ply = "*" -six = "*" [[package]] name = "jwcrypto" -version = "1.5.3" +version = "1.5.4" description = "Implementation of JOSE Web standards" optional = false python-versions = ">= 3.8" files = [ - {file = "jwcrypto-1.5.3.tar.gz", hash = "sha256:3af84bb6ed78fb29325308d4eca55e2842f1583010cb6c09207375a4ecea151f"}, + {file = "jwcrypto-1.5.4.tar.gz", hash = "sha256:0815fbab613db99bad85691da5f136f8860423396667728a264bcfa6e1db36b0"}, ] [package.dependencies] cryptography = ">=3.4" -typing_extensions = "*" +typing_extensions = ">=4.5.0" [[package]] name = "lxml" @@ -2777,4 +2764,4 @@ indy = ["python3-indy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "7170ac3cc281b82cf814d0757f2b7d0b94bf7009a58921ee9b6566dc51c957bd" +content-hash = "03d6243f318a33b1924161fd6a5458bf41954757f82da8e6bbb3f2a04a5998e0" From c73527d2b8061c4a774678a097a0910b4982235d Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 16:14:59 +0200 Subject: [PATCH 10/69] :arrow_up: Bump `rlp` to latest Signed-off-by: ff137 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c168a103fe..54e5fc3121 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,7 @@ pytz="~2021.1" pyyaml="~6.0.1" qrcode = {version = ">=6.1,<7.0", extras = ["pil"]} requests="~2.31.0" -rlp="1.2.0" +rlp="4.0.0" unflatten="~0.1" sd-jwt = "^0.10.3" did-peer-2 = "^0.1.2" From c8386ec5ea25e8d5718bc6b8a78e6592a5c3cfaa Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 16:15:07 +0200 Subject: [PATCH 11/69] Update lock file Signed-off-by: ff137 --- poetry.lock | 77 +++++++++++++++++++++++++---------------------------- 1 file changed, 37 insertions(+), 40 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0de5538f30..38ea7cfe68 100644 --- a/poetry.lock +++ b/poetry.lock @@ -871,62 +871,59 @@ gmpy2 = ["gmpy2"] [[package]] name = "eth-hash" -version = "0.3.3" +version = "0.6.0" description = "eth-hash: The Ethereum hashing function, keccak256, sometimes (erroneously) called sha3" optional = false -python-versions = ">=3.5, <4" +python-versions = ">=3.8, <4" files = [ - {file = "eth-hash-0.3.3.tar.gz", hash = "sha256:8cde211519ff1a98b46e9057cb909f12ab62e263eb30a0a94e2f7e1f46ac67a0"}, - {file = "eth_hash-0.3.3-py3-none-any.whl", hash = "sha256:3c884e4f788b38cc92cff05c4e43bc6b82686066f04ecfae0e11cdcbe5a283bd"}, + {file = "eth-hash-0.6.0.tar.gz", hash = "sha256:ae72889e60db6acbb3872c288cfa02ed157f4c27630fcd7f9c8442302c31e478"}, + {file = "eth_hash-0.6.0-py3-none-any.whl", hash = "sha256:9f8daaa345764f8871dc461855049ac54ae4291d780279bce6fce7f24e3f17d3"}, ] [package.extras] -dev = ["Sphinx (>=1.6.5,<2)", "bumpversion (>=0.5.3,<1)", "flake8 (==3.7.9)", "ipython", "isort (>=4.2.15,<5)", "mypy (==0.770)", "pydocstyle (>=5.0.0,<6)", "pytest (==5.4.1)", "pytest-watch (>=4.1.0,<5)", "pytest-xdist", "sphinx-rtd-theme (>=0.1.9,<1)", 
"towncrier (>=19.2.0,<20)", "tox (==3.14.6)", "twine", "wheel"] -doc = ["Sphinx (>=1.6.5,<2)", "sphinx-rtd-theme (>=0.1.9,<1)", "towncrier (>=19.2.0,<20)"] -lint = ["flake8 (==3.7.9)", "isort (>=4.2.15,<5)", "mypy (==0.770)", "pydocstyle (>=5.0.0,<6)"] +dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] pycryptodome = ["pycryptodome (>=3.6.6,<4)"] -pysha3 = ["pysha3 (>=1.0.0,<2.0.0)"] -test = ["pytest (==5.4.1)", "pytest-xdist", "tox (==3.14.6)"] +pysha3 = ["pysha3 (>=1.0.0,<2.0.0)", "safe-pysha3 (>=1.0.0)"] +test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "eth-typing" -version = "2.3.0" +version = "4.0.0" description = "eth-typing: Common type annotations for ethereum python packages" optional = false -python-versions = ">=3.5, <4" +python-versions = ">=3.8, <4" files = [ - {file = "eth-typing-2.3.0.tar.gz", hash = "sha256:39cce97f401f082739b19258dfa3355101c64390914c73fe2b90012f443e0dc7"}, - {file = "eth_typing-2.3.0-py3-none-any.whl", hash = "sha256:b7fa58635c1cb0cbf538b2f5f1e66139575ea4853eac1d6000f0961a4b277422"}, + {file = "eth-typing-4.0.0.tar.gz", hash = "sha256:9af0b6beafbc5c2e18daf19da5f5a68315023172c4e79d149e12ad10a3d3f731"}, + {file = "eth_typing-4.0.0-py3-none-any.whl", hash = "sha256:7e556bea322b6e8c0a231547b736c258e10ce9eed5ddc254f51031b12af66a16"}, ] [package.extras] -dev = ["Sphinx (>=1.6.5,<2)", "bumpversion (>=0.5.3,<1)", "flake8 (==3.8.3)", "ipython", "isort (>=4.2.15,<5)", "mypy (==0.782)", "pydocstyle (>=3.0.0,<4)", "pytest (>=4.4,<4.5)", "pytest-watch (>=4.1.0,<5)", "pytest-xdist", "sphinx-rtd-theme (>=0.1.9)", "tox (>=2.9.1,<3)", "twine", "wheel"] -doc = ["Sphinx (>=1.6.5,<2)", "sphinx-rtd-theme (>=0.1.9)"] -lint = ["flake8 (==3.8.3)", "isort (>=4.2.15,<5)", "mypy (==0.782)", "pydocstyle (>=3.0.0,<4)"] -test = ["pytest (>=4.4,<4.5)", "pytest-xdist", "tox (>=2.9.1,<3)"] +dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] +test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "eth-utils" -version = "1.10.0" +version = "3.0.0" description = "eth-utils: Common utility functions for python code that interacts with Ethereum" optional = false -python-versions = ">=3.5,!=3.5.2,<4" +python-versions = ">=3.8, <4" files = [ - {file = "eth-utils-1.10.0.tar.gz", hash = "sha256:bf82762a46978714190b0370265a7148c954d3f0adaa31c6f085ea375e4c61af"}, - {file = "eth_utils-1.10.0-py3-none-any.whl", hash = "sha256:74240a8c6f652d085ed3c85f5f1654203d2f10ff9062f83b3bad0a12ff321c7a"}, + {file = "eth-utils-3.0.0.tar.gz", hash = "sha256:8721869568448349bceae63c277b75758d11e0dc190e7ef31e161b89619458f1"}, + {file = "eth_utils-3.0.0-py3-none-any.whl", hash = "sha256:9a284106acf6f6ce91ddf792489cf8bd4c681fd5ae7653d2f3d5d100be5c3905"}, ] [package.dependencies] -cytoolz = {version = ">=0.10.1,<1.0.0", markers = "implementation_name == \"cpython\""} -eth-hash = ">=0.3.1,<0.4.0" -eth-typing = ">=2.2.1,<3.0.0" -toolz = {version = ">0.8.2,<1", markers = "implementation_name == \"pypy\""} +cytoolz = {version = ">=0.10.1", 
markers = "implementation_name == \"cpython\""} +eth-hash = ">=0.3.1" +eth-typing = ">=3.0.0" +toolz = {version = ">0.8.2", markers = "implementation_name == \"pypy\""} [package.extras] -dev = ["Sphinx (>=1.6.5,<2)", "black (>=18.6b4,<19)", "bumpversion (>=0.5.3,<1)", "flake8 (==3.7.9)", "hypothesis (>=4.43.0,<5.0.0)", "ipython", "isort (>=4.2.15,<5)", "mypy (==0.720)", "pydocstyle (>=5.0.0,<6)", "pytest (==5.4.1)", "pytest (>=3.4.1,<4.0.0)", "pytest-watch (>=4.1.0,<5)", "pytest-xdist", "sphinx-rtd-theme (>=0.1.9,<2)", "towncrier (>=19.2.0,<20)", "tox (==3.14.6)", "twine (>=1.13,<2)", "wheel (>=0.30.0,<1.0.0)"] -doc = ["Sphinx (>=1.6.5,<2)", "sphinx-rtd-theme (>=0.1.9,<2)", "towncrier (>=19.2.0,<20)"] -lint = ["black (>=18.6b4,<19)", "flake8 (==3.7.9)", "isort (>=4.2.15,<5)", "mypy (==0.720)", "pydocstyle (>=5.0.0,<6)", "pytest (>=3.4.1,<4.0.0)"] -test = ["hypothesis (>=4.43.0,<5.0.0)", "pytest (==5.4.1)", "pytest-xdist", "tox (==3.14.6)"] +dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "eth-hash[pycryptodome]", "hypothesis (>=4.43.0)", "ipython", "mypy (==1.5.1)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] +test = ["hypothesis (>=4.43.0)", "mypy (==1.5.1)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "exceptiongroup" @@ -2241,23 +2238,23 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rlp" -version = "1.2.0" -description = "A package for Recursive Length Prefix encoding and decoding" +version = "4.0.0" +description = "rlp: A package for Recursive Length Prefix encoding and decoding" optional = false -python-versions = "*" +python-versions = ">=3.8, <4" files = [ - {file = "rlp-1.2.0-py2.py3-none-any.whl", hash = "sha256:97b7e770f16442772311b33e6bc28b45318e7c8def69b9df16452304e224e9df"}, - {file = "rlp-1.2.0.tar.gz", hash = "sha256:27273fc2dbc3513c1e05ea6b8af28aac8745fb09c164e39e2ed2807bf7e1b342"}, + {file = "rlp-4.0.0-py3-none-any.whl", hash = "sha256:1747fd933e054e6d25abfe591be92e19a4193a56c93981c05bd0f84dfe279f14"}, + {file = "rlp-4.0.0.tar.gz", hash = "sha256:61a5541f86e4684ab145cb849a5929d2ced8222930a570b3941cf4af16b72a78"}, ] [package.dependencies] -eth-utils = ">=1.0.2,<2" +eth-utils = ">=2" [package.extras] -dev = ["Sphinx (>=1.6.5,<2)", "bumpversion (>=0.5.3,<1)", "flake8 (==3.4.1)", "hypothesis (==3.56.5)", "ipython", "pytest (==3.3.2)", "pytest-watch (>=4.1.0,<5)", "pytest-xdist", "setuptools (>=36.2.0)", "sphinx-rtd-theme (>=0.1.9)", "tox (>=2.9.1,<3)", "twine", "wheel"] -doc = ["Sphinx (>=1.6.5,<2)", "sphinx-rtd-theme (>=0.1.9)"] -lint = ["flake8 (==3.4.1)"] -test = ["hypothesis (==3.56.5)", "pytest (==3.3.2)", "tox (>=2.9.1,<3)"] +dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "hypothesis (==5.19.0)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] +rust-backend = ["rusty-rlp (>=0.2.1,<0.3)"] +test = ["hypothesis (==5.19.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "ruff" @@ -2764,4 +2761,4 @@ indy = ["python3-indy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "03d6243f318a33b1924161fd6a5458bf41954757f82da8e6bbb3f2a04a5998e0" 
+content-hash = "d6262896db12a3f1c1bc4cb2039ea82799688e1ea89e7c6caf12114b14f1c982" From adcd45b36ce63d608254770b193bc24a135c5446 Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 17:42:34 +0200 Subject: [PATCH 12/69] :art: Fix Marshmallow deprecation warnings with metadata field Signed-off-by: ff137 --- .../anoncreds/models/anoncreds_revocation.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/aries_cloudagent/anoncreds/models/anoncreds_revocation.py b/aries_cloudagent/anoncreds/models/anoncreds_revocation.py index 3a257749c4..5fe66d8f0e 100644 --- a/aries_cloudagent/anoncreds/models/anoncreds_revocation.py +++ b/aries_cloudagent/anoncreds/models/anoncreds_revocation.py @@ -62,15 +62,18 @@ class Meta: unknown = EXCLUDE public_keys = fields.Dict( - data_key="publicKeys", example=INDY_RAW_PUBLIC_KEY_EXAMPLE + data_key="publicKeys", metadata={"example": INDY_RAW_PUBLIC_KEY_EXAMPLE} ) - max_cred_num = fields.Int(data_key="maxCredNum", example=666) + max_cred_num = fields.Int(data_key="maxCredNum", metadata={"example": 777}) tails_location = fields.Str( data_key="tailsLocation", - example="https://tails-server.com/hash/7Qen9RDyemMuV7xGQvp7NjwMSpyHieJyBakycxN7dX7P", + metadata={ + "example": "https://tails-server.com/hash/7Qen9RDyemMuV7xGQvp7NjwMSpyHieJyBakycxN7dX7P" + }, ) tails_hash = fields.Str( - data_key="tailsHash", example="7Qen9RDyemMuV7xGQvp7NjwMSpyHieJyBakycxN7dX7P" + data_key="tailsHash", + metadata={"example": "7Qen9RDyemMuV7xGQvp7NjwMSpyHieJyBakycxN7dX7P"}, ) From a865df71b9797d25f9f714ad0979569485792303 Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 17:50:19 +0200 Subject: [PATCH 13/69] :art: `send_webhook` tests to expect and absorb deprecation warning Signed-off-by: ff137 --- aries_cloudagent/admin/tests/test_admin_server.py | 5 +++-- aries_cloudagent/core/tests/test_dispatcher.py | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/aries_cloudagent/admin/tests/test_admin_server.py b/aries_cloudagent/admin/tests/test_admin_server.py index 7eae850288..72a4300e69 100644 --- a/aries_cloudagent/admin/tests/test_admin_server.py +++ b/aries_cloudagent/admin/tests/test_admin_server.py @@ -507,5 +507,6 @@ def _smaller_scope(): with pytest.raises(RuntimeError): await responder.send_outbound(None) - with pytest.raises(RuntimeError): - await responder.send_webhook("test", {}) + with pytest.deprecated_call(): + with pytest.raises(RuntimeError): + await responder.send_webhook("test", {}) diff --git a/aries_cloudagent/core/tests/test_dispatcher.py b/aries_cloudagent/core/tests/test_dispatcher.py index adceb7d5ec..0644c6d3fa 100644 --- a/aries_cloudagent/core/tests/test_dispatcher.py +++ b/aries_cloudagent/core/tests/test_dispatcher.py @@ -590,8 +590,9 @@ def _smaller_scope(): with self.assertRaises(RuntimeError): await responder.send_outbound(None) - with self.assertRaises(RuntimeError): - await responder.send_webhook("test", {}) + with pytest.deprecated_call(): + with self.assertRaises(RuntimeError): + await responder.send_webhook("test", {}) # async def test_dispatch_version_with_degraded_features(self): # profile = make_profile() From d1d276b18107fcec24fbe90c516d413dca813c35 Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 17:57:01 +0200 Subject: [PATCH 14/69] :arrow_up: Upgrade `Markdown` to latest Signed-off-by: ff137 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 54e5fc3121..eaec349daf 100644 --- a/pyproject.toml +++ b/pyproject.toml 
@@ -26,7 +26,7 @@ ConfigArgParse="~1.5.3" deepmerge="~0.3.0" ecdsa="~0.16.1" jsonpath-ng="1.6.1" -Markdown="~3.1.1" +Markdown="~3.5.2" markupsafe="2.0.1" marshmallow="~3.20.1" nest_asyncio="~1.5.5" From b8a8e6868853da2685dc1718554ea333bba7d7f2 Mon Sep 17 00:00:00 2001 From: ff137 Date: Fri, 2 Feb 2024 17:59:14 +0200 Subject: [PATCH 15/69] Update lock file Signed-off-by: ff137 --- poetry.lock | 49 ++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 42 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 38ea7cfe68..cc6a1b1369 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1122,6 +1122,25 @@ files = [ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] +[[package]] +name = "importlib-metadata" +version = "7.0.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + [[package]] name = "indy-credx" version = "1.1.1" @@ -1310,19 +1329,20 @@ source = ["Cython (>=3.0.7)"] [[package]] name = "markdown" -version = "3.1.1" -description = "Python implementation of Markdown." +version = "3.5.2" +description = "Python implementation of John Gruber's Markdown." 
optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +python-versions = ">=3.8" files = [ - {file = "Markdown-3.1.1-py2.py3-none-any.whl", hash = "sha256:56a46ac655704b91e5b7e6326ce43d5ef72411376588afa1dd90e881b83c7e8c"}, - {file = "Markdown-3.1.1.tar.gz", hash = "sha256:2e50876bcdd74517e7b71f3e7a76102050edec255b3983403f1a63e7c8a41e7a"}, + {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, + {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, ] [package.dependencies] -setuptools = ">=36" +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} [package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] [[package]] @@ -2753,6 +2773,21 @@ files = [ idna = ">=2.0" multidict = ">=4.0" +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + [extras] askar = ["anoncreds", "aries-askar", "indy-credx", "indy-vdr"] bbs = ["ursa-bbs-signatures"] @@ -2761,4 +2796,4 @@ indy = ["python3-indy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "d6262896db12a3f1c1bc4cb2039ea82799688e1ea89e7c6caf12114b14f1c982" +content-hash = "e09d129a9c0ef3a42156af4a26400519bc9b66e69ceeb71334bfcf64d4a0bf25" From 97640e1b51ec1c22e58de50dcea142299aa31d77 Mon Sep 17 00:00:00 2001 From: ff137 Date: Wed, 7 Feb 2024 23:30:28 +0200 Subject: [PATCH 16/69] :art: replace usage of deprecated .warn method with .warning Signed-off-by: ff137 --- aries_cloudagent/anoncreds/default/legacy_indy/registry.py | 6 +++--- aries_cloudagent/anoncreds/revocation.py | 6 +++--- aries_cloudagent/indy/credx/issuer.py | 6 +++--- aries_cloudagent/revocation/models/issuer_rev_reg_record.py | 6 +++--- aries_cloudagent/revocation/recover.py | 2 +- aries_cloudagent/revocation_anoncreds/recover.py | 2 +- 6 files changed, 14 insertions(+), 14 deletions(-) diff --git a/aries_cloudagent/anoncreds/default/legacy_indy/registry.py b/aries_cloudagent/anoncreds/default/legacy_indy/registry.py index 3be3cab976..c302f2348d 100644 --- a/aries_cloudagent/anoncreds/default/legacy_indy/registry.py +++ b/aries_cloudagent/anoncreds/default/legacy_indy/registry.py @@ -796,8 +796,8 @@ async def _revoc_reg_entry_with_fix( # Ledger rejected transaction request: client request invalid: # InvalidClientRequest(...) 
# In this scenario we try to post a correction - LOGGER.warn("Retry ledger update/fix due to error") - LOGGER.warn(err) + LOGGER.warning("Retry ledger update/fix due to error") + LOGGER.warning(err) (_, _, rev_entry_res) = await self.fix_ledger_entry( profile, rev_list, @@ -806,7 +806,7 @@ async def _revoc_reg_entry_with_fix( write_ledger, endorser_did, ) - LOGGER.warn("Ledger update/fix applied") + LOGGER.warning("Ledger update/fix applied") elif "InvalidClientTaaAcceptanceError" in err.roll_up: # if no write access (with "InvalidClientTaaAcceptanceError") # e.g. aries_cloudagent.ledger.error.LedgerTransactionError: diff --git a/aries_cloudagent/anoncreds/revocation.py b/aries_cloudagent/anoncreds/revocation.py index c596c8a0e9..0b9891b4dc 100644 --- a/aries_cloudagent/anoncreds/revocation.py +++ b/aries_cloudagent/anoncreds/revocation.py @@ -1205,7 +1205,7 @@ async def revoke_pending_credentials( ) failed_crids.add(rev_id) elif rev_id >= rev_info["next_index"]: - LOGGER.warn( + LOGGER.warning( "Skipping requested credential revocation" "on rev reg id %s, cred rev id=%s not yet issued", revoc_reg_id, @@ -1213,7 +1213,7 @@ async def revoke_pending_credentials( ) failed_crids.add(rev_id) elif rev_list.revocation_list[rev_id] == 1: - LOGGER.warn( + LOGGER.warning( "Skipping requested credential revocation" "on rev reg id %s, cred rev id=%s already revoked", revoc_reg_id, @@ -1255,7 +1255,7 @@ async def revoke_pending_credentials( CATEGORY_REV_LIST, revoc_reg_id, for_update=True ) if not rev_info_upd: - LOGGER.warn( + LOGGER.warning( "Revocation registry missing, skipping update: {}", revoc_reg_id, ) diff --git a/aries_cloudagent/indy/credx/issuer.py b/aries_cloudagent/indy/credx/issuer.py index 6150d9d9ca..26843c0428 100644 --- a/aries_cloudagent/indy/credx/issuer.py +++ b/aries_cloudagent/indy/credx/issuer.py @@ -456,7 +456,7 @@ async def revoke_credentials( ) failed_crids.add(rev_id) elif rev_id > rev_info["curr_id"]: - LOGGER.warn( + LOGGER.warning( "Skipping requested credential revocation" "on rev reg id %s, cred rev id=%s not yet issued", revoc_reg_id, @@ -464,7 +464,7 @@ async def revoke_credentials( ) failed_crids.add(rev_id) elif rev_id in used_ids: - LOGGER.warn( + LOGGER.warning( "Skipping requested credential revocation" "on rev reg id %s, cred rev id=%s already revoked", revoc_reg_id, @@ -500,7 +500,7 @@ async def revoke_credentials( CATEGORY_REV_REG_INFO, revoc_reg_id, for_update=True ) if not rev_reg_upd or not rev_reg_info: - LOGGER.warn( + LOGGER.warning( "Revocation registry missing, skipping update: {}", revoc_reg_id, ) diff --git a/aries_cloudagent/revocation/models/issuer_rev_reg_record.py b/aries_cloudagent/revocation/models/issuer_rev_reg_record.py index a027010937..60c1ffd1fc 100644 --- a/aries_cloudagent/revocation/models/issuer_rev_reg_record.py +++ b/aries_cloudagent/revocation/models/issuer_rev_reg_record.py @@ -330,15 +330,15 @@ async def send_entry( # Ledger rejected transaction request: client request invalid: # InvalidClientRequest(...) # In this scenario we try to post a correction - LOGGER.warn("Retry ledger update/fix due to error") - LOGGER.warn(err) + LOGGER.warning("Retry ledger update/fix due to error") + LOGGER.warning(err) (_, _, res) = await self.fix_ledger_entry( profile, True, ledger.pool.genesis_txns, ) rev_entry_res = {"result": res} - LOGGER.warn("Ledger update/fix applied") + LOGGER.warning("Ledger update/fix applied") elif "InvalidClientTaaAcceptanceError" in err.roll_up: # if no write access (with "InvalidClientTaaAcceptanceError") # e.g. 
aries_cloudagent.ledger.error.LedgerTransactionError: diff --git a/aries_cloudagent/revocation/recover.py b/aries_cloudagent/revocation/recover.py index f2bf38267c..eca105a1b4 100644 --- a/aries_cloudagent/revocation/recover.py +++ b/aries_cloudagent/revocation/recover.py @@ -100,7 +100,7 @@ async def generate_ledger_rrrecovery_txn( set_revoked = set(set_revoked) mismatch = prev_revoked - set_revoked if mismatch: - LOGGER.warn( + LOGGER.warning( "Credential index(es) revoked on the ledger, but not in wallet: %s", mismatch, ) diff --git a/aries_cloudagent/revocation_anoncreds/recover.py b/aries_cloudagent/revocation_anoncreds/recover.py index 2a891d265a..2d9eca8755 100644 --- a/aries_cloudagent/revocation_anoncreds/recover.py +++ b/aries_cloudagent/revocation_anoncreds/recover.py @@ -102,7 +102,7 @@ async def generate_ledger_rrrecovery_txn( set_revoked = set(set_revoked) mismatch = prev_revoked - set_revoked if mismatch: - LOGGER.warn( + LOGGER.warning( "Credential index(es) revoked on the ledger, but not in wallet: %s", mismatch, ) From 492acd337dfd598e7e78c2c89e07b4837725980b Mon Sep 17 00:00:00 2001 From: ff137 Date: Wed, 7 Feb 2024 23:33:32 +0200 Subject: [PATCH 17/69] :art: set test scope to module level Signed-off-by: ff137 --- aries_cloudagent/storage/tests/test_askar_storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aries_cloudagent/storage/tests/test_askar_storage.py b/aries_cloudagent/storage/tests/test_askar_storage.py index a7d9883273..f257c60180 100644 --- a/aries_cloudagent/storage/tests/test_askar_storage.py +++ b/aries_cloudagent/storage/tests/test_askar_storage.py @@ -358,7 +358,7 @@ async def test_postgres_wallet_storage_works(self): class TestAskarStorageSearchSession(IsolatedAsyncioTestCase): - @pytest.mark.asyncio + @pytest.mark.asyncio(scope="module") async def test_askar_storage_search_session(self): profile = "profileId" From 71ebce743685c0bf4fa4c975a2a1dcc6e4915a70 Mon Sep 17 00:00:00 2001 From: ff137 Date: Wed, 7 Feb 2024 23:35:31 +0200 Subject: [PATCH 18/69] :art: set test scope to module level Signed-off-by: ff137 --- .../v1_0/handlers/tests/test_response_handler.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/aries_cloudagent/protocols/didexchange/v1_0/handlers/tests/test_response_handler.py b/aries_cloudagent/protocols/didexchange/v1_0/handlers/tests/test_response_handler.py index 0fd802b2b1..87369fbaf2 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/handlers/tests/test_response_handler.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/handlers/tests/test_response_handler.py @@ -77,7 +77,7 @@ async def asyncSetUp(self): did_doc_attach=self.did_doc_attach, ) - @pytest.mark.asyncio + @pytest.mark.asyncio(scope="module") @mock.patch.object(test_module, "DIDXManager") async def test_called(self, mock_didx_mgr): mock_didx_mgr.return_value.accept_response = mock.CoroutineMock() @@ -91,7 +91,7 @@ async def test_called(self, mock_didx_mgr): ) assert not responder.messages - @pytest.mark.asyncio + @pytest.mark.asyncio(scope="module") @mock.patch.object(test_module, "DIDXManager") async def test_called_auto_ping(self, mock_didx_mgr): self.ctx.update_settings({"auto_ping_connection": True}) @@ -109,7 +109,7 @@ async def test_called_auto_ping(self, mock_didx_mgr): result, target = messages[0] assert isinstance(result, Ping) - @pytest.mark.asyncio + @pytest.mark.asyncio(scope="module") @mock.patch.object(test_module, "DIDXManager") @mock.patch.object(connection_target, "ConnectionTarget") async def 
test_problem_report(self, mock_conn_target, mock_didx_mgr): @@ -146,7 +146,7 @@ async def test_problem_report(self, mock_conn_target, mock_didx_mgr): ) assert target == {"target_list": [mock_conn_target]} - @pytest.mark.asyncio + @pytest.mark.asyncio(scope="module") @mock.patch.object(test_module, "DIDXManager") @mock.patch.object(connection_target, "ConnectionTarget") async def test_problem_report_did_doc( @@ -193,7 +193,7 @@ async def test_problem_report_did_doc( ) assert target == {"target_list": [mock_conn_target]} - @pytest.mark.asyncio + @pytest.mark.asyncio(scope="module") @mock.patch.object(test_module, "DIDXManager") @mock.patch.object(connection_target, "ConnectionTarget") async def test_problem_report_did_doc_no_conn_target( From 58f9490d216d295304d325d5285b9e4a79a19cda Mon Sep 17 00:00:00 2001 From: ff137 Date: Mon, 12 Feb 2024 11:24:18 +0200 Subject: [PATCH 19/69] :art: fix metadata containing required field Signed-off-by: ff137 --- aries_cloudagent/anoncreds/routes.py | 53 ++++++++++++++-------------- 1 file changed, 26 insertions(+), 27 deletions(-) diff --git a/aries_cloudagent/anoncreds/routes.py b/aries_cloudagent/anoncreds/routes.py index 1edc0607a3..e1eea30d33 100644 --- a/aries_cloudagent/anoncreds/routes.py +++ b/aries_cloudagent/anoncreds/routes.py @@ -86,17 +86,17 @@ class SchemaPostOptionSchema(OpenAPISchema): endorser_connection_id = fields.Str( metadata={ "description": endorser_connection_id_description, - "required": False, "example": UUIDFour.EXAMPLE, - } + }, + required=False, ) create_transaction_for_endorser = fields.Bool( metadata={ "description": create_transaction_for_endorser_description, - "required": False, "example": False, - } + }, + required=False, ) @@ -267,8 +267,8 @@ class CredIdMatchInfo(OpenAPISchema): metadata={ "description": "Credential definition identifier", "example": INDY_CRED_DEF_ID_EXAMPLE, - "required": True, - } + }, + required=True, ) @@ -279,23 +279,23 @@ class InnerCredDefSchema(OpenAPISchema): metadata={ "description": "Credential definition tag", "example": "default", - "required": True, - } + }, + required=True, ) schema_id = fields.Str( metadata={ "description": "Schema identifier", "example": INDY_SCHEMA_ID_EXAMPLE, - "required": True, }, + required=True, data_key="schemaId", ) issuer_id = fields.Str( metadata={ "description": "Issuer Identifier of the credential definition", "example": INDY_OR_KEY_DID_EXAMPLE, - "required": True, }, + required=True, data_key="issuerId", ) @@ -307,28 +307,27 @@ class CredDefPostOptionsSchema(OpenAPISchema): metadata={ "description": endorser_connection_id_description, "example": UUIDFour.EXAMPLE, - "required": False, - } + }, + required=False, ) create_transaction_for_endorser = fields.Bool( metadata={ "description": create_transaction_for_endorser_description, "example": False, - "required": False, - } + }, + required=False, ) support_revocation = fields.Bool( metadata={ "description": "Support credential revocation", - "required": False, - } + }, + required=False, ) revocation_registry_size = fields.Int( metadata={ "description": "Maximum number of credential revocations per registry", - "example": 666, - "required": False, - } + }, + required=False, ) @@ -516,15 +515,15 @@ class RevRegDefOptionsSchema(OpenAPISchema): metadata={ "description": endorser_connection_id_description, "example": UUIDFour.EXAMPLE, - "required": False, - } + }, + required=False, ) create_transaction_for_endorser = fields.Bool( metadata={ "description": create_transaction_for_endorser_description, "example": False, 
- "required": False, - } + }, + required=False, ) @@ -591,15 +590,15 @@ class RevListOptionsSchema(OpenAPISchema): metadata={ "description": endorser_connection_id_description, "example": UUIDFour.EXAMPLE, - "required": False, - } + }, + required=False, ) create_transaction_for_endorser = fields.Bool( metadata={ "description": create_transaction_for_endorser_description, "example": False, - "required": False, - } + }, + required=False, ) From 4df2984b7aa6629ac2c21eef76ed2797b3f8a843 Mon Sep 17 00:00:00 2001 From: ff137 Date: Mon, 12 Feb 2024 12:19:44 +0200 Subject: [PATCH 20/69] :art: fix marshmallow warning Signed-off-by: ff137 --- aries_cloudagent/anoncreds/routes.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/aries_cloudagent/anoncreds/routes.py b/aries_cloudagent/anoncreds/routes.py index e1eea30d33..6044b6d154 100644 --- a/aries_cloudagent/anoncreds/routes.py +++ b/aries_cloudagent/anoncreds/routes.py @@ -606,8 +606,10 @@ class RevListCreateRequestSchema(OpenAPISchema): """Request schema for revocation registry creation request.""" rev_reg_def_id = fields.Str( - description="Revocation registry definition identifier", - example=INDY_REV_REG_ID_EXAMPLE, + metadata={ + "description": "Revocation registry definition identifier", + "example": INDY_REV_REG_ID_EXAMPLE, + } ) options = fields.Nested(RevListOptionsSchema) From dfd829df5f9a485fd8e412edce4211d0fe2a80f0 Mon Sep 17 00:00:00 2001 From: ff137 Date: Mon, 12 Feb 2024 12:39:22 +0200 Subject: [PATCH 21/69] :art: ignore specific warnings from external packages Signed-off-by: ff137 --- aries_cloudagent/admin/tests/test_admin_server.py | 5 +++++ pyproject.toml | 3 +++ 2 files changed, 8 insertions(+) diff --git a/aries_cloudagent/admin/tests/test_admin_server.py b/aries_cloudagent/admin/tests/test_admin_server.py index 72a4300e69..300e82f758 100644 --- a/aries_cloudagent/admin/tests/test_admin_server.py +++ b/aries_cloudagent/admin/tests/test_admin_server.py @@ -20,6 +20,11 @@ from ..server import AdminServer, AdminSetupError +# Ignore Marshmallow warning, as well as 'NotAppKeyWarning' coming from apispec packages +@pytest.mark.filterwarnings( + "ignore:The 'missing' attribute of fields is deprecated. Use 'load_default' instead.", + "ignore:It is recommended to use web.AppKey instances for keys.", +) class TestAdminServer(IsolatedAsyncioTestCase): async def asyncSetUp(self): self.message_results = [] diff --git a/pyproject.toml b/pyproject.toml index eaec349daf..5f02f6482a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -139,6 +139,9 @@ markers = [ ] junit_family = "xunit1" asyncio_mode = "auto" +filterwarnings = [ + 'ignore:distutils Version classes are deprecated. Use packaging.version instead.:DeprecationWarning', # Ignore specific DeprecationWarning for old packages using distutils version class +] [tool.coverage.run] From dc63049f61339f94ab5cecf257f1c3dc27ba5760 Mon Sep 17 00:00:00 2001 From: Wade Barnes Date: Wed, 14 Feb 2024 09:00:05 -0800 Subject: [PATCH 22/69] Add Dependabot configuration - Configure Dependabot to automatically maintain dependencies for GitHub Actions. - Check for updates once a week. - Group all updates into a single PR. 
Signed-off-by: Wade Barnes --- .github/dependabot.yml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..cd4692b79f --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,14 @@ + # For details on how this file works refer to: + # - https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file +version: 2 +updates: + # Maintain dependencies for GitHub Actions + # - Check for updates once a week + # - Group all updates into a single PR + - package-ecosystem: github-actions + directory: / + schedule: + interval: weekly + groups: + all-actions: + patterns: [ "*" ] \ No newline at end of file From 8eceb59e787922eef43649b1ab126c12888e0ca4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 17:50:20 +0000 Subject: [PATCH 23/69] chore(deps): Bump the all-actions group with 10 updates Bumps the all-actions group with 10 updates: | Package | From | To | | --- | --- | --- | | [actions/checkout](https://github.com/actions/checkout) | `2` | `4` | | [actions/setup-python](https://github.com/actions/setup-python) | `4` | `5` | | [psf/black](https://github.com/psf/black) | `24.1.1` | `24.2.0` | | [github/codeql-action](https://github.com/github/codeql-action) | `2` | `3` | | [pypa/gh-action-pip-audit](https://github.com/pypa/gh-action-pip-audit) | `1.0.0` | `1.0.8` | | [actions/cache](https://github.com/actions/cache) | `3` | `4` | | [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) | `2` | `3` | | [docker/login-action](https://github.com/docker/login-action) | `2` | `3` | | [docker/metadata-action](https://github.com/docker/metadata-action) | `4` | `5` | | [docker/build-push-action](https://github.com/docker/build-push-action) | `3` | `5` | Updates `actions/checkout` from 2 to 4 - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v2...v4) Updates `actions/setup-python` from 4 to 5 - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) Updates `psf/black` from 24.1.1 to 24.2.0 - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/24.1.1...24.2.0) Updates `github/codeql-action` from 2 to 3 - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/v2...v3) Updates `pypa/gh-action-pip-audit` from 1.0.0 to 1.0.8 - [Release notes](https://github.com/pypa/gh-action-pip-audit/releases) - [Commits](https://github.com/pypa/gh-action-pip-audit/compare/v1.0.0...v1.0.8) Updates `actions/cache` from 3 to 4 - [Release notes](https://github.com/actions/cache/releases) - [Changelog](https://github.com/actions/cache/blob/main/RELEASES.md) - [Commits](https://github.com/actions/cache/compare/v3...v4) Updates `docker/setup-buildx-action` from 2 to 3 - [Release notes](https://github.com/docker/setup-buildx-action/releases) - [Commits](https://github.com/docker/setup-buildx-action/compare/v2...v3) Updates `docker/login-action` from 2 to 3 - 
[Release notes](https://github.com/docker/login-action/releases) - [Commits](https://github.com/docker/login-action/compare/v2...v3) Updates `docker/metadata-action` from 4 to 5 - [Release notes](https://github.com/docker/metadata-action/releases) - [Upgrade guide](https://github.com/docker/metadata-action/blob/master/UPGRADE.md) - [Commits](https://github.com/docker/metadata-action/compare/v4...v5) Updates `docker/build-push-action` from 3 to 5 - [Release notes](https://github.com/docker/build-push-action/releases) - [Commits](https://github.com/docker/build-push-action/compare/v3...v5) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: psf/black dependency-type: direct:production update-type: version-update:semver-minor dependency-group: all-actions - dependency-name: github/codeql-action dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: pypa/gh-action-pip-audit dependency-type: direct:production update-type: version-update:semver-patch dependency-group: all-actions - dependency-name: actions/cache dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: docker/setup-buildx-action dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: docker/login-action dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: docker/metadata-action dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: docker/build-push-action dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions ... Signed-off-by: dependabot[bot] --- .github/workflows/blackformat.yml | 6 +++--- .github/workflows/codeql.yml | 6 +++--- .github/workflows/integrationtests.yml | 2 +- .github/workflows/nigthly.yml | 2 +- .github/workflows/pip-audit.yml | 4 ++-- .github/workflows/publish-indy.yml | 12 ++++++------ .github/workflows/publish.yml | 12 ++++++------ .github/workflows/pythonpublish.yml | 4 ++-- .github/workflows/snyk.yml | 4 ++-- .github/workflows/tests-indy.yml | 8 ++++---- .github/workflows/tests.yml | 4 ++-- 11 files changed, 32 insertions(+), 32 deletions(-) diff --git a/.github/workflows/blackformat.yml b/.github/workflows/blackformat.yml index 4474603ea5..39f2345bf3 100644 --- a/.github/workflows/blackformat.yml +++ b/.github/workflows/blackformat.yml @@ -10,11 +10,11 @@ jobs: name: lint runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: python-version: "3.9" - name: Black Code Formatter Check # The version of black should be adjusted at the same time dev # dependencies are updated. 
- uses: psf/black@24.1.1 + uses: psf/black@24.2.0 diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index e6f15917a0..e77074d757 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -17,13 +17,13 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/integrationtests.yml b/.github/workflows/integrationtests.yml index e7c5a23275..6d2fca9349 100644 --- a/.github/workflows/integrationtests.yml +++ b/.github/workflows/integrationtests.yml @@ -20,7 +20,7 @@ jobs: if: (github.event_name == 'pull_request' && github.repository == 'hyperledger/aries-cloudagent-python') || (github.event_name != 'pull_request') steps: - name: checkout-acapy - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: acapy #- name: run-von-network diff --git a/.github/workflows/nigthly.yml b/.github/workflows/nigthly.yml index 7af06f95a4..c6e01b95ce 100644 --- a/.github/workflows/nigthly.yml +++ b/.github/workflows/nigthly.yml @@ -26,7 +26,7 @@ jobs: commits_today: ${{ steps.commits.outputs.commits_today }} date: ${{ steps.date.outputs.date }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: print latest_commit run: echo ${{ github.sha }} - name: Get new commits diff --git a/.github/workflows/pip-audit.yml b/.github/workflows/pip-audit.yml index 486a36e0fb..5fa3be6f7e 100644 --- a/.github/workflows/pip-audit.yml +++ b/.github/workflows/pip-audit.yml @@ -11,14 +11,14 @@ jobs: runs-on: ubuntu-latest if: (github.event_name == 'pull_request' && github.repository == 'hyperledger/aries-cloudagent-python') || (github.event_name != 'pull_request') steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: install run: | python -m venv env/ source env/bin/activate python -m pip install --upgrade pip python -m pip install . 
- - uses: pypa/gh-action-pip-audit@v1.0.0 + - uses: pypa/gh-action-pip-audit@v1.0.8 with: virtual-environment: env/ local: true diff --git a/.github/workflows/publish-indy.yml b/.github/workflows/publish-indy.yml index 17f479c8e6..3d95cc6d84 100644 --- a/.github/workflows/publish-indy.yml +++ b/.github/workflows/publish-indy.yml @@ -51,7 +51,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Code - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ inputs.ref || '' }} @@ -61,7 +61,7 @@ jobs: echo "repo-owner=${GITHUB_REPOSITORY_OWNER,,}" >> $GITHUB_OUTPUT - name: Cache Docker layers - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: /tmp/.buildx-cache key: ${{ runner.os }}-buildx-${{ github.sha }} @@ -69,10 +69,10 @@ jobs: ${{ runner.os }}-buildx- - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Log in to the GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -80,7 +80,7 @@ jobs: - name: Setup Image Metadata id: meta - uses: docker/metadata-action@v4 + uses: docker/metadata-action@v5 with: images: | ghcr.io/${{ steps.info.outputs.repo-owner }}/aries-cloudagent-python @@ -88,7 +88,7 @@ jobs: type=raw,value=py${{ matrix.python-version }}-indy-${{ env.INDY_VERSION }}-${{ inputs.tag || github.event.release.tag_name }} - name: Build and Push Image to ghcr.io - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: push: true context: . diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 937c2e1bbd..8195dc1b7c 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -48,7 +48,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Code - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ inputs.ref || '' }} @@ -58,7 +58,7 @@ jobs: echo "repo-owner=${GITHUB_REPOSITORY_OWNER,,}" >> $GITHUB_OUTPUT - name: Cache Docker layers - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: /tmp/.buildx-cache key: ${{ runner.os }}-buildx-${{ github.sha }} @@ -66,10 +66,10 @@ jobs: ${{ runner.os }}-buildx- - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Log in to the GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -77,7 +77,7 @@ jobs: - name: Setup Image Metadata id: meta - uses: docker/metadata-action@v4 + uses: docker/metadata-action@v5 with: images: | ghcr.io/${{ steps.info.outputs.repo-owner }}/aries-cloudagent-python @@ -85,7 +85,7 @@ jobs: type=raw,value=py${{ matrix.python-version }}-${{ inputs.tag || github.event.release.tag_name }} - name: Build and Push Image to ghcr.io - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: push: true context: . 
diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index 5e7ebfb330..8211541fbe 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -8,9 +8,9 @@ jobs: deploy: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.x" - name: Install dependencies diff --git a/.github/workflows/snyk.yml b/.github/workflows/snyk.yml index 7160212071..30d997d594 100644 --- a/.github/workflows/snyk.yml +++ b/.github/workflows/snyk.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest if: ${{ github.repository_owner == 'hyperledger' }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Build a Docker image run: docker build -t aries-cloudagent -f docker/Dockerfile . - name: Run Snyk to check Docker image for vulnerabilities @@ -28,6 +28,6 @@ jobs: image: aries-cloudagent args: --file=docker/Dockerfile - name: Upload result to GitHub Code Scanning - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: snyk.sarif diff --git a/.github/workflows/tests-indy.yml b/.github/workflows/tests-indy.yml index 7e69e76b30..8b7651a39f 100644 --- a/.github/workflows/tests-indy.yml +++ b/.github/workflows/tests-indy.yml @@ -18,10 +18,10 @@ jobs: name: Test Python ${{ inputs.python-version }} on Indy ${{ inputs.indy-version }} runs-on: ${{ inputs.os }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Cache image layers - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: /tmp/.buildx-cache-test key: ${{ runner.os }}-buildx-test-${{ github.sha }} @@ -29,10 +29,10 @@ jobs: ${{ runner.os }}-buildx-test- - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Build test image - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: load: true context: . diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5fb610580b..62699408a3 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -15,9 +15,9 @@ jobs: name: Test Python ${{ inputs.python-version }} runs-on: ${{ inputs.os }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ inputs.python-version }} cache: 'pip' From 6401d8c4b3e99dc5d05614e83011235c62e103bc Mon Sep 17 00:00:00 2001 From: PatStLouis Date: Fri, 16 Feb 2024 23:44:49 +0000 Subject: [PATCH 24/69] change middleware registration order Signed-off-by: PatStLouis --- aries_cloudagent/admin/server.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/aries_cloudagent/admin/server.py b/aries_cloudagent/admin/server.py index f500a9d4c4..0d2e8fa655 100644 --- a/aries_cloudagent/admin/server.py +++ b/aries_cloudagent/admin/server.py @@ -299,7 +299,7 @@ def _matches_additional_routes(self, path: str) -> bool: async def make_application(self) -> web.Application: """Get the aiohttp application instance.""" - middlewares = [ready_middleware, debug_middleware, validation_middleware] + middlewares = [ready_middleware, debug_middleware] # admin-token and admin-token are mutually exclusive and required. 
# This should be enforced during parameter parsing but to be sure, @@ -452,6 +452,9 @@ async def setup_context(request: web.Request, handler): return await handler(request) middlewares.append(setup_context) + + # We register the validation_middleware last so we don't validate unauthorized requests + middlewares.append(validation_middleware) app = web.Application( middlewares=middlewares, From 5f4858aebbcbf741c00e3a22846d8ad459b4f07d Mon Sep 17 00:00:00 2001 From: PatStLouis Date: Fri, 16 Feb 2024 23:50:34 +0000 Subject: [PATCH 25/69] lint me alone! Signed-off-by: PatStLouis --- aries_cloudagent/admin/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aries_cloudagent/admin/server.py b/aries_cloudagent/admin/server.py index 0d2e8fa655..2d0249432a 100644 --- a/aries_cloudagent/admin/server.py +++ b/aries_cloudagent/admin/server.py @@ -452,7 +452,7 @@ async def setup_context(request: web.Request, handler): return await handler(request) middlewares.append(setup_context) - + # We register the validation_middleware last so we don't validate unauthorized requests middlewares.append(validation_middleware) From 2f92b4d173f01d119f8f56ce91ad2e759c366698 Mon Sep 17 00:00:00 2001 From: PatStLouis Date: Fri, 16 Feb 2024 23:57:04 +0000 Subject: [PATCH 26/69] shorten comment Signed-off-by: PatStLouis --- aries_cloudagent/admin/server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aries_cloudagent/admin/server.py b/aries_cloudagent/admin/server.py index 2d0249432a..2abf845328 100644 --- a/aries_cloudagent/admin/server.py +++ b/aries_cloudagent/admin/server.py @@ -452,8 +452,8 @@ async def setup_context(request: web.Request, handler): return await handler(request) middlewares.append(setup_context) - - # We register the validation_middleware last so we don't validate unauthorized requests + + # Register validation_middleware last avoiding unauthorized validations middlewares.append(validation_middleware) app = web.Application( From 9a3ec907a07318716ed5425cb11ef6110341050a Mon Sep 17 00:00:00 2001 From: PatStLouis Date: Sat, 17 Feb 2024 00:02:19 +0000 Subject: [PATCH 27/69] linting again... 
Signed-off-by: PatStLouis --- aries_cloudagent/admin/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aries_cloudagent/admin/server.py b/aries_cloudagent/admin/server.py index 2abf845328..946cb0baf2 100644 --- a/aries_cloudagent/admin/server.py +++ b/aries_cloudagent/admin/server.py @@ -452,7 +452,7 @@ async def setup_context(request: web.Request, handler): return await handler(request) middlewares.append(setup_context) - + # Register validation_middleware last avoiding unauthorized validations middlewares.append(validation_middleware) From 54ce86a70f89c9a914d5f37785968cf7d358917b Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Sat, 17 Feb 2024 00:42:24 +0000 Subject: [PATCH 28/69] 0.12.0rc1 Signed-off-by: Stephen Curran --- CHANGELOG.md | 48 +++++++++++++++++++++++++++++----- PUBLISHING.md | 48 +++++++++++++++------------------- docs/conf.py | 5 +++- docs/features/DIDResolution.md | 2 +- docs/features/SupportedRFCs.md | 4 ++- open-api/openapi.json | 20 +++++--------- open-api/swagger.json | 19 +++++--------- pyproject.toml | 2 +- scripts/generate-open-api-spec | 3 +-- 9 files changed, 84 insertions(+), 67 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5a0f30a8ec..397f0934b4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,24 +1,30 @@ # Aries Cloud Agent Python Changelog -## 0.12.0rc0 +## 0.12.0rc1 -### January 23, 2024 +### February 17, 2024 -Release 0.12.0 is a relative large release (53 PRs and counting...) but currently with no breaking changes. We expect there will be breaking changes (at least in the handling of endorsement) before the 0.12.0 release is finalized, hence the minor version update. +Release 0.12.0 is a relative large release but currently with no breaking changes. We expect there will be breaking changes (at least in the handling of endorsement) before the 0.12.0 release is finalized, hence the minor version update. + +The first `rc0` release candidate `rc0` introduced a regression via [PR \#2705] that has been reverted in [PR \#2789]. Further investigation is needed to determine how to accomplish the goal of [PR \#2705] ("feat: inject profile") without the regression. + +[PR \#2705]: https://github.com/hyperledger/aries-cloudagent-python/pull/2705 +[PR \#2789]: https://github.com/hyperledger/aries-cloudagent-python/pull/2789 Much progress was made on `did:peer` support in this release, with the handling of inbound [DID Peer] 1 added, and inbound and outbound support for DID Peer 2 and 4. The goal of that work is to eliminate the remaining places where "unqualified" DIDs remain. Work continues in supporting ledger agnostic [AnonCreds], and the new [Hyperledger AnonCreds Rust] library. Attention was also given in the release to the handling of JSON-LD [Data Integrity Verifiable Credentials], with more expected before the release is finalized. In addition to those updates, there were fixes and improvements across the codebase. +The most visible change in this release is the re-organization of the ACA-Py documentation, moving the vast majority of the documents to the folders within the `docs` folder -- a long overdue change that will allow us to soon publish the documents on [https://aca-py.org](https://aca-py.org) directly from the ACA-Py repository, rather than from the separate [aries-acapy-docs](https://github.com/hyperledger/aries-acapy-docs) currently being used. 
+ [DID Peer]: https://identity.foundation/peer-did-method-spec/ [AnonCreds]: https://www.hyperledger.org/projects/anoncreds [Hyperledger AnonCreds Rust]: https://github.com/hyperledger/anoncreds-rs [Data Integrity Verifiable Credentials]: https://www.w3.org/TR/vc-data-integrity/ -### 0.12.0rc0 Breaking Changes +### 0.12.0rc1 Breaking Changes -There are no breaking changes in 0.12.0rc0. +There are no breaking changes in 0.12.0rc1. - -#### 0.12.0rc0 Categorized List of Pull Requests +#### 0.12.0rc1 Categorized List of Pull Requests - DID Handling and Connection Establishment Updates/Fixes - fix: save multi_use to the DB for OOB invitations [\#2694](https://github.com/hyperledger/aries-cloudagent-python/pull/2694) [frostyfrog](https://github.com/frostyfrog) @@ -35,6 +41,12 @@ There are no breaking changes in 0.12.0rc0. - feat: support resolving did:peer:1 received in did exchange [\#2611](https://github.com/hyperledger/aries-cloudagent-python/pull/2611) [dbluhm](https://github.com/dbluhm) - Ledger Agnostic AnonCreds RS Changes + - Anoncreds revoke and publish-revocations endorsement [\#2782](https://github.com/hyperledger/aries-cloudagent-python/pull/2782) [jamshale](https://github.com/jamshale) + - Upgrade anoncreds to version 0.2.0-dev11 [\#2763](https://github.com/hyperledger/aries-cloudagent-python/pull/2763) [jamshale](https://github.com/jamshale) + - Update anoncreds to 0.2.0-dev10 [\#2758](https://github.com/hyperledger/aries-cloudagent-python/pull/2758) [jamshale](https://github.com/jamshale) + - Anoncreds - Cred Def and Revocation Endorsement [\#2752](https://github.com/hyperledger/aries-cloudagent-python/pull/2752) [jamshale](https://github.com/jamshale) + - Upgrade anoncreds to 0.2.0-dev9 [\#2741](https://github.com/hyperledger/aries-cloudagent-python/pull/2741) [jamshale](https://github.com/jamshale) + - Upgrade anoncred-rs to version 0.2.0-dev8 [\#2734](https://github.com/hyperledger/aries-cloudagent-python/pull/2734) [jamshale](https://github.com/jamshale) - Upgrade anoncreds to 0.2.0.dev7 [\#2719](https://github.com/hyperledger/aries-cloudagent-python/pull/2719) [jamshale](https://github.com/jamshale) - Improve api documentation and error handling [\#2690](https://github.com/hyperledger/aries-cloudagent-python/pull/2690) [jamshale](https://github.com/jamshale) - Add unit tests for anoncreds revocation [\#2688](https://github.com/hyperledger/aries-cloudagent-python/pull/2688) [jamshale](https://github.com/jamshale) @@ -52,6 +64,9 @@ There are no breaking changes in 0.12.0rc0. 
- Fix: RevRegEntry Transaction Endorsement 0.11.0 [\#2558](https://github.com/hyperledger/aries-cloudagent-python/pull/2558) [shaangill025](https://github.com/shaangill025) - JSON-LD Verifiable Credential/DIF Presentation Exchange updates + - Revert profile injection for VcLdpManager on vc-api endpoints [\#2794](https://github.com/hyperledger/aries-cloudagent-python/pull/2794) [PatStLouis](https://github.com/PatStLouis) + - Add cached copy of BBS v1 context [\#2749](https://github.com/hyperledger/aries-cloudagent-python/pull/2749) [andrewwhitehead](https://github.com/andrewwhitehead) + - Update BBS+ context to bypass redirections [\#2739](https://github.com/hyperledger/aries-cloudagent-python/pull/2739) [swcurran](https://github.com/swcurran) - feat: make VcLdpManager pluggable [\#2706](https://github.com/hyperledger/aries-cloudagent-python/pull/2706) [dbluhm](https://github.com/dbluhm) - fix: minor type hint corrections for VcLdpManager [\#2704](https://github.com/hyperledger/aries-cloudagent-python/pull/2704) [dbluhm](https://github.com/dbluhm) - Remove if condition which checks if the credential.type array is equal to 1 [\#2670](https://github.com/hyperledger/aries-cloudagent-python/pull/2670) [PatStLouis](https://github.com/PatStLouis) @@ -65,9 +80,13 @@ There are no breaking changes in 0.12.0rc0. - Add ConnectionProblemReport handler [\#2600](https://github.com/hyperledger/aries-cloudagent-python/pull/2600) [usingtechnology](https://github.com/usingtechnology) - Multitenancy Updates and Fixes + - feature/per tenant settings [\#2790](https://github.com/hyperledger/aries-cloudagent-python/pull/2790) [amanji](https://github.com/amanji) - Improve Per Tenant Logging: Fix issues around default log file path [\#2659](https://github.com/hyperledger/aries-cloudagent-python/pull/2659) [shaangill025](https://github.com/shaangill025) - Other Fixes, Demo, DevContainer and Documentation Fixes + - Demo description of reuse in establishing a connection [\#2787](https://github.com/hyperledger/aries-cloudagent-python/pull/2787) [swcurran](https://github.com/swcurran) + - Reorganize the ACA-Py Documentation Files [\#2765](https://github.com/hyperledger/aries-cloudagent-python/pull/2765) [swcurran](https://github.com/swcurran) + - Tweaks to MD files to enable aca-py.org publishing [\#2771](https://github.com/hyperledger/aries-cloudagent-python/pull/2771) [swcurran](https://github.com/swcurran) - Update devcontainer documentation [\#2729](https://github.com/hyperledger/aries-cloudagent-python/pull/2729) [jamshale](https://github.com/jamshale) - Update the SupportedRFCs Document to be up to date [\#2722](https://github.com/hyperledger/aries-cloudagent-python/pull/2722) [swcurran](https://github.com/swcurran) - Fix incorrect Sphinx search library version reference [\#2716](https://github.com/hyperledger/aries-cloudagent-python/pull/2716) [swcurran](https://github.com/swcurran) @@ -78,6 +97,12 @@ There are no breaking changes in 0.12.0rc0. 
- Update the ReadTheDocs config in case we do another 0.10.x release [\#2629](https://github.com/hyperledger/aries-cloudagent-python/pull/2629) [swcurran](https://github.com/swcurran) - Dependencies and Internal Updates + - Revert profile inject [\#2789](https://github.com/hyperledger/aries-cloudagent-python/pull/2789) [jamshale](https://github.com/jamshale) + + - Move emit events to profile and delay sending until after commit [\#2760](https://github.com/hyperledger/aries-cloudagent-python/pull/2760) [ianco](https://github.com/ianco) + - fix: partial revert of ConnRecord schema change 1.0.0 [\#2746](https://github.com/hyperledger/aries-cloudagent-python/pull/2746) [dbluhm](https://github.com/dbluhm) + - chore(deps): Bump aiohttp from 3.9.1 to 3.9.2 dependencies [\#2745](https://github.com/hyperledger/aries-cloudagent-python/pull/2745) [dependabot bot](https://github.com/dependabot bot) + - bump pydid to v 0.4.3 [\#2737](https://github.com/hyperledger/aries-cloudagent-python/pull/2737) [PatStLouis](https://github.com/PatStLouis) - Fix subwallet record removal [\#2721](https://github.com/hyperledger/aries-cloudagent-python/pull/2721) [andrewwhitehead](https://github.com/andrewwhitehead) - chore(deps): Bump jinja2 from 3.1.2 to 3.1.3 dependencies [\#2707](https://github.com/hyperledger/aries-cloudagent-python/pull/2707) [dependabot bot](https://github.com/dependabot bot) - feat: inject profile [\#2705](https://github.com/hyperledger/aries-cloudagent-python/pull/2705) [dbluhm](https://github.com/dbluhm) @@ -90,10 +115,19 @@ There are no breaking changes in 0.12.0rc0. - Bump aiohttp from 3.8.6 to 3.9.0 dependencies [\#2635](https://github.com/hyperledger/aries-cloudagent-python/pull/2635) [dependabot bot](https://github.com/dependabot bot) - CI/CD, Testing, and Developer Tools/Productivity Updates + - chore(deps): Bump the all-actions group with 10 updates dependencies [\#2784](https://github.com/hyperledger/aries-cloudagent-python/pull/2784) [dependabot bot](https://github.com/dependabot bot) + - Add Dependabot configuration [\#2783](https://github.com/hyperledger/aries-cloudagent-python/pull/2783) [WadeBarnes](https://github.com/WadeBarnes) + - Implement B006 rule [\#2775](https://github.com/hyperledger/aries-cloudagent-python/pull/2775) [jamshale](https://github.com/jamshale) + - ⬆️ Upgrade pytest to 8.0 [\#2773](https://github.com/hyperledger/aries-cloudagent-python/pull/2773) [ff137](https://github.com/ff137) + - ⬆️ Update pytest-asyncio to 0.23.4 [\#2764](https://github.com/hyperledger/aries-cloudagent-python/pull/2764) [ff137](https://github.com/ff137) + - Remove asynctest dependency and fix "coroutine not awaited" warnings [\#2755](https://github.com/hyperledger/aries-cloudagent-python/pull/2755) [ff137](https://github.com/ff137) + - Fix pytest collection errors when anoncreds package is not installed [\#2750](https://github.com/hyperledger/aries-cloudagent-python/pull/2750) [andrewwhitehead](https://github.com/andrewwhitehead) + - chore: pin black version [\#2747](https://github.com/hyperledger/aries-cloudagent-python/pull/2747) [dbluhm](https://github.com/dbluhm) - Tweak scope of GHA integration tests [\#2662](https://github.com/hyperledger/aries-cloudagent-python/pull/2662) [ianco](https://github.com/ianco) - Update snyk workflow to execute on Pull Request [\#2658](https://github.com/hyperledger/aries-cloudagent-python/pull/2658) [usingtechnology](https://github.com/usingtechnology) - Release management pull requests + - 0.12.0rc1 
[\#2798](https://github.com/hyperledger/aries-cloudagent-python/pull/2798) [swcurran](https://github.com/swcurran) - 0.12.0rc0 [\#2732](https://github.com/hyperledger/aries-cloudagent-python/pull/2732) [swcurran](https://github.com/swcurran) ## 0.11.0 diff --git a/PUBLISHING.md b/PUBLISHING.md index e44c2dfdee..4aa7c91003 100644 --- a/PUBLISHING.md +++ b/PUBLISHING.md @@ -91,7 +91,9 @@ Once you have the list of PRs: - Organize the list into suitable categories, update (if necessary) the PR description and add notes to clarify the changes. See previous release entries to understand the style -- a format that should help developers. - Add a narrative about the release above the PR that highlights what has gone into the release. -4. Update the ReadTheDocs in the `/docs` folder by following the instructions in +4. Check to see if there are any other PRs that should be included in the release. + +5. Update the ReadTheDocs in the `/docs` folder by following the instructions in the `docs/README.md` file. That will likely add a number of new and modified files to the PR. Eliminate all of the errors in the generation process, either by mocking external dependencies or by fixing ACA-Py code. If @@ -99,44 +101,36 @@ Once you have the list of PRs: developer. Experience has demonstrated to use that documentation generation errors should be fixed in the code. -5. Regenerate openapi.json and swagger.json by running - `./scripts/generate-open-api-spec`. - -6. Update the version number listed in - [pyproject.toml](https://github.com/hyperledger/aries-cloudagent-python/tree/main/pyproject.toml) and, prefixed with - a "v" in [open-api/openapi.json](https://github.com/hyperledger/open-api/tree/main/openapi.json) and - [open-api/swagger.json](https://github.com/hyperledger/open-api/tree/main/swagger.json) (e.g. "0.7.2" in the - pyproject.toml file and "v0.7.2" in the openapi.json file). The incremented - version number should adhere to the [Semantic Versioning +6. Search across the repository for the previous version number and update it + everywhere that makes sense. The CHANGELOG.md is a likely exception, and the + `pyproject.toml` in the root is **MUST**. You can skip (although it won't + hurt) to update the files in the `open-api` folder as they will be + automagically updated by the next step in publishing. The incremented version + number **MUST** adhere to the [Semantic Versioning Specification](https://semver.org/#semantic-versioning-specification-semver) based on the changes since the last published release. For Release - Candidates, the form of the tag is "0.11.0rc2". As of release `0.11.0` - we have dropped the previously used `-` in the release candidate version - string to better follow the semver rules. - -7. An extra search of the repo for the existing tag is recommended to see if - there are any other instances of the tag in the repo. If any are found to be - required (other than in CHANGELOG.md and the examples in this file, of - course), finding a way to not need them is best, but if they are needed, - please update this document to note where the tag can be found. - -8. Check to see if there are any other PRs that should be included in the release. - -9. Double check all of these steps above, and then submit a PR from the branch. + Candidates, the form of the tag is "0.11.0rc2". As of release `0.11.0` we + have dropped the previously used `-` in the release candidate version string + to better follow the semver rules. + +7. 
Regenerate openapi.json and swagger.json by running + `./scripts/generate-open-api-spec`. + +8. Double check all of these steps above, and then submit a PR from the branch. Add this new PR to CHANGELOG.md so that all the PRs are included. If there are still further changes to be merged, mark the PR as "Draft", repeat **ALL** of the steps again, and then mark this PR as ready and then wait until it is merged. It's embarrassing when you have to do a whole new release just because you missed something silly...I know! -10. Immediately after it is merged, create a new GitHub tag representing the +9. Immediately after it is merged, create a new GitHub tag representing the version. The tag name and title of the release should be the same as the version in [pyproject.toml](https://github.com/hyperledger/aries-cloudagent-python/tree/main/pyproject.toml). Use the "Generate Release Notes" capability to get a sequential listing of the PRs in the release, to complement the manually curated Changelog. Verify on PyPi that the version is published. -11. New images for the release are automatically published by the GitHubAction +10. New images for the release are automatically published by the GitHubAction Workflows: [publish.yml] and [publish-indy.yml]. The actions are triggered when a release is tagged, so no manual action is needed. The images are published in the [Hyperledger Package Repository under @@ -150,11 +144,11 @@ Once you have the list of PRs: [publish.yml]: https://github.com/hyperledger/aries-cloudagent-python/blob/main/.github/workflows/publish.yml [publish-indy.yml]: https://github.com/hyperledger/aries-cloudagent-python/blob/main/.github/workflows/publish-indy.yml -12. Update the ACA-Py Read The Docs site by building the new "latest" (main +11. Update the ACA-Py Read The Docs site by building the new "latest" (main branch) and activating and building the new release. Appropriate permissions are required to publish the new documentation version. -13. Update the [https://aca-py.org] website with the latest documentation by +12. Update the [https://aca-py.org] website with the latest documentation by creating a PR and tag of the latest documentation from this site. Details are provided in the [aries-acapy-docs] repository. diff --git a/docs/conf.py b/docs/conf.py index 5199e18ab5..336d7e620d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -57,6 +57,9 @@ "pytz", "multiformats", "sd_jwt", + "anoncreds", + "did_peer_2", + "did_peer_4", ] # "aries_cloudagent.tests.test_conductor", @@ -101,7 +104,7 @@ # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # -source_suffix = [".rst", ".md"] +source_suffix = [".rst"] # source_suffix = '.rst' # The master toctree document. 
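The `docs/conf.py` hunk above grows the list of modules that are mocked out while the API documentation is generated, which is the "mocking external dependencies" step that the updated `PUBLISHING.md` instructions point to. As a minimal sketch of that mechanism (assuming the list shown here is bound to Sphinx's `autodoc_mock_imports` option, since the variable name lies outside this hunk, and trimmed to just the newly added entries):

```python
# docs/conf.py -- illustrative sketch only, not the project's full configuration
project = "aries-cloudagent"
extensions = ["sphinx.ext.autodoc"]

# Packages imported by the code under documentation but not installed in the
# docs build environment; sphinx.ext.autodoc substitutes mock objects for them
# so that importing aries_cloudagent modules does not abort the build.
autodoc_mock_imports = [
    "anoncreds",
    "did_peer_2",
    "did_peer_4",
]

# Only reStructuredText sources are parsed; ".md" was dropped in the hunk above.
source_suffix = [".rst"]
```

With the new AnonCreds and did:peer libraries mocked this way, a docs build (locally or on ReadTheDocs) can import the modules that depend on them without those packages being installed.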
diff --git a/docs/features/DIDResolution.md b/docs/features/DIDResolution.md index cd84ecbc91..30e8f9210a 100644 --- a/docs/features/DIDResolution.md +++ b/docs/features/DIDResolution.md @@ -176,7 +176,7 @@ plugin: The following is a fully functional Dockerfile encapsulating this setup: ```dockerfile= -FROM ghcr.io/hyperledger/aries-cloudagent-python:py3.9-0.12.0rc0 +FROM ghcr.io/hyperledger/aries-cloudagent-python:py3.9-0.12.0rc1 RUN pip3 install git+https://github.com/dbluhm/acapy-resolver-github CMD ["aca-py", "start", "-it", "http", "0.0.0.0", "3000", "-ot", "http", "-e", "http://localhost:3000", "--admin", "0.0.0.0", "3001", "--admin-insecure-mode", "--no-ledger", "--plugin", "acapy_resolver_github"] diff --git a/docs/features/SupportedRFCs.md b/docs/features/SupportedRFCs.md index 245f881cea..022999fcf1 100644 --- a/docs/features/SupportedRFCs.md +++ b/docs/features/SupportedRFCs.md @@ -8,7 +8,7 @@ ACA-Py or the repository `main` branch. Reminders (and PRs!) to update this page welcome! If you have any questions, please contact us on the #aries channel on [Hyperledger Discord](https://discord.gg/hyperledger) or through an issue in this repo. -**Last Update**: 2024-01-17, Release 0.12.0rc0 +**Last Update**: 2024-02-17, Release 0.12.0rc1 > The checklist version of this document was created as a joint effort > between [Northern Block](https://northernblock.io/), [Animo Solutions](https://animo.id/) and the Ontario government, on behalf of the Ontario government. @@ -129,6 +129,8 @@ are fully supported in ACA-Py **EXCEPT** as noted in the table below. | [0587-encryption-envelope-v2](https://github.com/hyperledger/aries-rfcs/tree/b3a3942ef052039e73cd23d847f42947f8287da2/features/0587-encryption-envelope-v2) | :construction: | Supporting the DIDComm v2 encryption envelope does not make sense until DIDComm v2 is to be supported. | | [0317-please-ack](https://github.com/hyperledger/aries-rfcs/tree/main/features/0317-please-ack) | :x: | An investigation was done into supporting `please-ack` and a number of complications were found. As a result, we expect that `please-ack` will be dropped from AIP 2.0. It has not been implemented by any Aries frameworks or deployments. | +There is a [PR to the Aries RFCs repository](https://github.com/hyperledger/aries-rfcs/pull/814) to remove those RFCs from AIP 2.0. If that PR is removed, the RFCs will be removed from the table above. 
+ ### Other Supported RFCs | RFC | Supported | Notes | diff --git a/open-api/openapi.json b/open-api/openapi.json index 9302c73f2d..c83a1fe881 100644 --- a/open-api/openapi.json +++ b/open-api/openapi.json @@ -2,7 +2,7 @@ "openapi" : "3.0.1", "info" : { "title" : "Aries Cloud Agent", - "version" : "v0.12.0rc0" + "version" : "v0.12.0rc1" }, "servers" : [ { "url" : "/" @@ -376,7 +376,7 @@ "in" : "query", "name" : "state", "schema" : { - "enum" : [ "active", "error", "invitation", "start", "init", "completed", "request", "response", "abandoned" ], + "enum" : [ "abandoned", "invitation", "active", "response", "request", "start", "error", "completed", "init" ], "type" : "string" } }, { @@ -5649,13 +5649,6 @@ "schema" : { "type" : "string" } - }, { - "description" : "Endorser will write the transaction after endorsing it", - "in" : "query", - "name" : "endorser_write_txn", - "schema" : { - "type" : "boolean" - } } ], "requestBody" : { "content" : { @@ -8382,9 +8375,8 @@ "type" : "string" }, "prover_did" : { - "description" : "Prover DID", - "example" : "WgWxqztrNooG92RXvxSTWv", - "pattern" : "^(did:sov:)?[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}$", + "description" : "Prover DID/Random String/UUID", + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type" : "string" } }, @@ -11512,8 +11504,8 @@ "type" : "string" }, "endorser_write_txn" : { - "description" : "If True, Endorser will write the transaction after endorsing it", - "example" : true, + "description" : "Request Endorser to write the ledger transaction, this parameter is deprecated and no longer supported.", + "example" : false, "type" : "boolean" }, "formats" : { diff --git a/open-api/swagger.json b/open-api/swagger.json index 619c433e58..a1982cf310 100644 --- a/open-api/swagger.json +++ b/open-api/swagger.json @@ -1,7 +1,7 @@ { "swagger" : "2.0", "info" : { - "version" : "v0.12.0rc0", + "version" : "v0.12.0rc1", "title" : "Aries Cloud Agent" }, "tags" : [ { @@ -339,7 +339,7 @@ "description" : "Connection state", "required" : false, "type" : "string", - "enum" : [ "active", "error", "invitation", "start", "init", "completed", "request", "response", "abandoned" ] + "enum" : [ "abandoned", "invitation", "active", "response", "request", "start", "error", "completed", "init" ] }, { "name" : "their_did", "in" : "query", @@ -4635,12 +4635,6 @@ "description" : "Transaction identifier", "required" : true, "type" : "string" - }, { - "name" : "endorser_write_txn", - "in" : "query", - "description" : "Endorser will write the transaction after endorsing it", - "required" : false, - "type" : "boolean" } ], "responses" : { "200" : { @@ -7210,9 +7204,8 @@ }, "prover_did" : { "type" : "string", - "example" : "WgWxqztrNooG92RXvxSTWv", - "description" : "Prover DID", - "pattern" : "^(did:sov:)?[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}$" + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "description" : "Prover DID/Random String/UUID" } } }, @@ -10333,8 +10326,8 @@ }, "endorser_write_txn" : { "type" : "boolean", - "example" : true, - "description" : "If True, Endorser will write the transaction after endorsing it" + "example" : false, + "description" : "Request Endorser to write the ledger transaction, this parameter is deprecated and no longer supported." 
}, "formats" : { "type" : "array", diff --git a/pyproject.toml b/pyproject.toml index d206d36eb1..11b85f1f63 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aries_cloudagent" -version = "0.12.0rc0" +version = "0.12.0rc1" description = "Hyperledger Aries Cloud Agent Python (ACA-Py) is a foundation for building decentralized identity applications and services running in non-mobile environments. " authors = ["Hyperledger Aries "] license = "Apache-2.0" diff --git a/scripts/generate-open-api-spec b/scripts/generate-open-api-spec index 496fa80098..64399f3f3e 100755 --- a/scripts/generate-open-api-spec +++ b/scripts/generate-open-api-spec @@ -38,8 +38,7 @@ ACA_PY_CMD_OPTIONS=" \ --multitenant \ --multitenant-admin \ --jwt-secret test \ - --no-ledger \ - --log-file /usr/src/app/logs/agent.log" + --no-ledger" # Specify openAPI JSON config file and shared directory OPEN_API_JSON_CONFIG="openAPIJSON.config" From b5fe27190f669978370d17ec7caf5813d08b9f63 Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Sat, 17 Feb 2024 00:44:17 +0000 Subject: [PATCH 29/69] Update PR number for this PR Signed-off-by: Stephen Curran --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 397f0934b4..1257dfb888 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -127,7 +127,7 @@ There are no breaking changes in 0.12.0rc1. - Update snyk workflow to execute on Pull Request [\#2658](https://github.com/hyperledger/aries-cloudagent-python/pull/2658) [usingtechnology](https://github.com/usingtechnology) - Release management pull requests - - 0.12.0rc1 [\#2798](https://github.com/hyperledger/aries-cloudagent-python/pull/2798) [swcurran](https://github.com/swcurran) + - 0.12.0rc1 [\#2799](https://github.com/hyperledger/aries-cloudagent-python/pull/2799) [swcurran](https://github.com/swcurran) - 0.12.0rc0 [\#2732](https://github.com/hyperledger/aries-cloudagent-python/pull/2732) [swcurran](https://github.com/swcurran) ## 0.11.0 From af4b8d3ca84b3eadb7552c03e4e36e52fcb9adcb Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Sun, 18 Feb 2024 15:36:22 +0000 Subject: [PATCH 30/69] Add latest PRs to Changelog Signed-off-by: Stephen Curran --- CHANGELOG.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1257dfb888..2d28aa53c7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,8 @@ Much progress was made on `did:peer` support in this release, with the handling The most visible change in this release is the re-organization of the ACA-Py documentation, moving the vast majority of the documents to the folders within the `docs` folder -- a long overdue change that will allow us to soon publish the documents on [https://aca-py.org](https://aca-py.org) directly from the ACA-Py repository, rather than from the separate [aries-acapy-docs](https://github.com/hyperledger/aries-acapy-docs) currently being used. +A big developer improvement is a revampling of the test handling to eliminate ~2500 warnings that were previously generated in the test suite. Nice job [@ff137](https://github.com/ff137)! + [DID Peer]: https://identity.foundation/peer-did-method-spec/ [AnonCreds]: https://www.hyperledger.org/projects/anoncreds [Hyperledger AnonCreds Rust]: https://github.com/hyperledger/anoncreds-rs @@ -24,6 +26,8 @@ The most visible change in this release is the re-organization of the ACA-Py doc There are no breaking changes in 0.12.0rc1. 
+ + #### 0.12.0rc1 Categorized List of Pull Requests - DID Handling and Connection Establishment Updates/Fixes @@ -59,6 +63,7 @@ There are no breaking changes in 0.12.0rc1. - Initial code migration from anoncreds-rs branch AnonCreds [\#2596](https://github.com/hyperledger/aries-cloudagent-python/pull/2596) [ianco](https://github.com/ianco) - Hyperledger Indy ledger related updates and fixes + - Add known issues section to Multiledger.md documentation [\#2788](https://github.com/hyperledger/aries-cloudagent-python/pull/2788) [esune](https://github.com/esune) - fix: update constants in TransactionRecord [\#2698](https://github.com/hyperledger/aries-cloudagent-python/pull/2698) [amanji](https://github.com/amanji) - Cache TAA by wallet name [\#2676](https://github.com/hyperledger/aries-cloudagent-python/pull/2676) [jamshale](https://github.com/jamshale) - Fix: RevRegEntry Transaction Endorsement 0.11.0 [\#2558](https://github.com/hyperledger/aries-cloudagent-python/pull/2558) [shaangill025](https://github.com/shaangill025) @@ -97,8 +102,8 @@ There are no breaking changes in 0.12.0rc1. - Update the ReadTheDocs config in case we do another 0.10.x release [\#2629](https://github.com/hyperledger/aries-cloudagent-python/pull/2629) [swcurran](https://github.com/swcurran) - Dependencies and Internal Updates + - Bump pyld version to 2.0.4 [\#2795](https://github.com/hyperledger/aries-cloudagent-python/pull/2795) [PatStLouis](https://github.com/PatStLouis) - Revert profile inject [\#2789](https://github.com/hyperledger/aries-cloudagent-python/pull/2789) [jamshale](https://github.com/jamshale) - - Move emit events to profile and delay sending until after commit [\#2760](https://github.com/hyperledger/aries-cloudagent-python/pull/2760) [ianco](https://github.com/ianco) - fix: partial revert of ConnRecord schema change 1.0.0 [\#2746](https://github.com/hyperledger/aries-cloudagent-python/pull/2746) [dbluhm](https://github.com/dbluhm) - chore(deps): Bump aiohttp from 3.9.1 to 3.9.2 dependencies [\#2745](https://github.com/hyperledger/aries-cloudagent-python/pull/2745) [dependabot bot](https://github.com/dependabot bot) @@ -115,6 +120,7 @@ There are no breaking changes in 0.12.0rc1. 
- Bump aiohttp from 3.8.6 to 3.9.0 dependencies [\#2635](https://github.com/hyperledger/aries-cloudagent-python/pull/2635) [dependabot bot](https://github.com/dependabot bot) - CI/CD, Testing, and Developer Tools/Productivity Updates + - Fix deprecation warnings [\#2756](https://github.com/hyperledger/aries-cloudagent-python/pull/2756) [ff137](https://github.com/ff137) - chore(deps): Bump the all-actions group with 10 updates dependencies [\#2784](https://github.com/hyperledger/aries-cloudagent-python/pull/2784) [dependabot bot](https://github.com/dependabot bot) - Add Dependabot configuration [\#2783](https://github.com/hyperledger/aries-cloudagent-python/pull/2783) [WadeBarnes](https://github.com/WadeBarnes) - Implement B006 rule [\#2775](https://github.com/hyperledger/aries-cloudagent-python/pull/2775) [jamshale](https://github.com/jamshale) From 78eef9d82d79f98cc54b9af20fa380135da3fb09 Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Mon, 19 Feb 2024 08:45:59 -0800 Subject: [PATCH 31/69] Adding capability to publish docs to a documentation site Signed-off-by: Stephen Curran --- .github/workflows/publish-docs.yml | 53 ++++++++++ docs/aca-py.org.md | 29 ++++++ docs/assets/aries-favicon.png | 89 +++++++++++++++++ mkdocs-requirements.txt | 3 + mkdocs.yml | 150 +++++++++++++++++++++++++++++ 5 files changed, 324 insertions(+) create mode 100644 .github/workflows/publish-docs.yml create mode 100644 docs/aca-py.org.md create mode 100644 docs/assets/aries-favicon.png create mode 100644 mkdocs-requirements.txt create mode 100644 mkdocs.yml diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml new file mode 100644 index 0000000000..5ed82add33 --- /dev/null +++ b/.github/workflows/publish-docs.yml @@ -0,0 +1,53 @@ +name: publish-docs + +on: + push: + # Publish `main` as latest + branches: + - main + + # Publish `v1.2.3` tags as releases + tags: + - v* + +permissions: + contents: write + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 # fetch all commits/branches + - uses: actions/setup-python@v4 + with: + python-version: 3.x + - uses: actions/cache@v2 + with: + key: ${{ github.ref }} + path: .cache + - name: Install Python dependencies + run: pip install -r ./mkdocs-requirements.txt + - name: Configure git user + run: | + git config --local user.email "github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" + + - name: Deploy docs + run: | + # Strip git ref prefix from version + echo "${{ github.ref }}" + VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,') + # Strip "v" prefix from tag name + [[ "${{ github.ref }}" == "refs/tags/"* ]] && ALIAS=$(echo $VERSION | sed -e 's/^v//') + # Copy all of the root level md files into the docs folder for deployment, tweaking the relative paths + for i in *.md; do sed -e "s#docs/#./#g" $i >docs/$i; done + # Create overrides folder, populate it for version, and then move to not apply if VERSION is main branch + mkdir overrides + echo -e "{% extends "base.html" %}\n\n{% block outdated %}\n You are viewing the documentation for ACA-Py Release $VERSION.\n{% endblock %}" >overrides/base.html + # If building from main, use latest as ALIAS + [ "$VERSION" == "main" ] && ALIAS=latest && mv overrides/base.html overrides/base.txt + echo $VERSION $ALIAS + mike deploy --push --update-aliases $VERSION $ALIAS + mike set-default latest diff --git a/docs/aca-py.org.md b/docs/aca-py.org.md new file mode 100644 index 
0000000000..5c6c394712 --- /dev/null +++ b/docs/aca-py.org.md @@ -0,0 +1,29 @@ +# Welcome! + +![Hyperledger Aries](https://raw.githubusercontent.com/hyperledger/aries-acapy-docs/main/assets/Hyperledger_Aries_Logo_Color.png) + +Welcome to the Aries Cloud Agent Python documentation site. On this site you +will find documentation for recent releases of ACA-Py. You'll find a few of the +older versions of ACA-Py (pre-`0.8.0`), all versions since `0.8.0`, and the +`main` branch, which is the latest and greatest. + +All of the documentation here is extracted from the [Aries Cloud Agent Python repository]. +If you want to contribute to the documentation, please start there. + +Ready to go? Scan the tabs in the page header to find the documentation you need now! + +## Code Internals Documentation + +In addition to this documentation site, the ACA-Py community also maintains an +ACA-Py internals documentation site. The internals documentation consists of the +`docstrings` extracted from the ACA-Py Python code and covers all of the +(non-test) modules in the codebase. Check it out on the [Aries Cloud +Agent-Python ReadTheDocs site](https://aries-cloud-agent-python.readthedocs.io/en/latest/). +As with this site, the ReadTheDocs documentation is version specific. + +Got questions? + +- Join us on the [Hyperledger Discord Server](https://chat.hyperledger.org), in the `#aries-cloudagent-python` channel. +- Add an issue in the [Aries Cloud Agent Python repository]. + +[Aries Cloud Agent Python repository]: https://github.com/hyperledger/aries-cloudagent-python diff --git a/docs/assets/aries-favicon.png b/docs/assets/aries-favicon.png new file mode 100644 index 0000000000..5b10050e05 --- /dev/null +++ b/docs/assets/aries-favicon.png @@ -0,0 +1,89 @@ + + + + + + Page not found · GitHub Pages + + + + +
[docs/assets/aries-favicon.png: the 89 added lines are a GitHub Pages "404 / File not found" HTML error page ("The site configured at this address does not contain the requested file"), not PNG image data; the markup is omitted here.]
+ + diff --git a/mkdocs-requirements.txt b/mkdocs-requirements.txt new file mode 100644 index 0000000000..734a4b2c1f --- /dev/null +++ b/mkdocs-requirements.txt @@ -0,0 +1,3 @@ +mkdocs-material +mike +tzdata \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 0000000000..1c9a9b0667 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,150 @@ +site_name: Hyperledger Aries ACA-Py Docs +repo_name: hyperledger/aries-cloudagent-python +repo_url: https://github.com/hyperledger/aries-cloudagent-python +theme: + name: material + custom_dir: overrides + logo: https://raw.githubusercontent.com/hyperledger/aries-acapy-docs/main/assets/Hyperledger_Aries_Logo_White.png + favicon: https://raw.githubusercontent.com/hyperledger/aries-cloudagent-python/main/docs/assets/aries-favicon.png + icon: + repo: fontawesome/brands/github + palette: + # Palette toggle for light mode + - media: "(prefers-color-scheme: light)" + scheme: default + toggle: + icon: material/brightness-7 + name: Switch to dark mode + # Palette toggle for dark mode + - media: "(prefers-color-scheme: dark)" + scheme: slate + toggle: + icon: material/brightness-4 + name: Switch to light mode + features: + - content.code.copy + - navigation.expand + - navigation.footer + - navigation.instant + - navigation.tabs + - navigation.tabs.sticky + - navigation.top + - navigation.tracking + - toc.follow +# - toc.integrate +markdown_extensions: + - abbr + - admonition + - attr_list + - def_list + - footnotes + - md_in_html + - toc: + permalink: true + toc_depth: 3 + - pymdownx.arithmatex: + generic: true + - pymdownx.betterem: + smart_enable: all + - pymdownx.caret + - pymdownx.details + - pymdownx.emoji: + emoji_generator: !!python/name:materialx.emoji.to_svg + emoji_index: !!python/name:materialx.emoji.twemoji + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.inlinehilite + - pymdownx.keys + - pymdownx.magiclink: + repo_url_shorthand: true + user: squidfunk + repo: mkdocs-material + - pymdownx.mark + - pymdownx.smartsymbols + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + - pymdownx.tabbed: + alternate_style: true + - pymdownx.tasklist: + custom_checkbox: true + - pymdownx.tilde +plugins: + - search + - mike +extra: + version: + provider: mike +nav: +- Welcome!: + - Welcome: aca-py.org.md + - ACA-Py README: README.md + - Release Notes: CHANGELOG.md +- Features: + - Developer Introduction: features/DevReadMe.md + - DevContainer Support: features/devcontainer.md + - Supported Aries Interop Profiles and RFCs: features/SupportedRFCs.md + - The Admin API: features/AdminAPI.md + - ACA-Py Plugins: features/PlugIns.md + - Multitenant ACA-Py: features/Multitenancy.md + - DID Methods: features/DIDMethods.md + - DID Resolution: features/DIDResolution.md + - Configuring Multiple Indy Ledgers: features/Multiledger.md + - Automatically Endorsing Indy Transations: features/Endorser.md + - Using W3C JSON-LD Signed Credentials: features/JsonLdCredentials.md + - Using SD-JWTs: features/SelectiveDisclosureJWTs.md + - AnonCreds Presentation Validation: features/AnoncredsProofValidation.md + - Multiple Credential Types: features/Multicredentials.md + - Code Generation with the Open API: features/UsingOpenAPI.md + - ACA-Py as a DIDComm Mediator: features/Mediation.md +- Demos: + - The Alice-Faber Demo: demo/README.md + - Open API Tutorial: demo/AriesOpenAPIDemo.md + - Alice Gets a Phone: demo/AliceGetsAPhone.md + - Hyperledger Indy Endorser 
In Action: demo/Endorser.md + - Using W3C JSON-LD Credentials: demo/AliceWantsAJsonCredential.md + - DIY -- ACME Controller Workshop: demo/AcmeDemoWorkshop.md + - Aries Using Postman Demo: demo/AriesPostmanDemo.md +- Getting Started: + - Becoming an Indy/Aries Developer: gettingStarted/README.md + - Hyperledger Indy Basics: gettingStarted/IndyBasics.md + - Hyperledger Aries Basics: gettingStarted/AriesBasics.md + - Decentralized Identity Demos: gettingStarted/DecentralizedIdentityDemos.md + - Aries - The Big Picture: gettingStarted/AriesBigPicture.md + - Aries Architecture: gettingStarted/AriesAgentArchitecture.md + - Aries Messaging: gettingStarted/AriesMessaging.md + - Aries Developer Demos: gettingStarted/AriesDeveloperDemos.md + - Agent Connections: gettingStarted/AgentConnections.md + - Issuing AnonCreds Credentials: gettingStarted/IssuingAnonCredsCredentials.md + - Presenting AnonCreds Proofs: gettingStarted/PresentingAnonCredsProofs.md + - Making Your Own ACA-Py Agent: gettingStarted/YourOwnAriesAgent.md + - Aries Developer Options: gettingStarted/IndyAriesDevOptions.md + - DIDComm Messaging: gettingStarted/DIDcommMsgs.md + - DIDComm Message Routing: gettingStarted/RoutingEncryption.md + - DIDComm Message Routing Example: gettingStarted/AriesRoutingExample.md + - TODO Connecting to an Indy Network: gettingStarted/ConnectIndyNetwork.md + - AnonCreds Credential Revocation: gettingStarted/CredentialRevocation.md +- Deploying: + - Deployment Model: deploying/deploymentModel.md + - Upgrading ACA-Py: deploying/UpgradingACA-Py.md + - Indy SDK to Askar Migration: deploying/IndySDKtoAskarMigration.md + - The Use of Poetry in ACA-Py: deploying/Poetry.md + - ACA-Py Container Images: deploying/ContainerImagesAndGithubActions.md + - Databases: deploying/Databases.md + - Persistent Queues and Caching: deploying/RedisPlugins.md + - The askar-anoncreds Wallet Type: deploying/AnonCredsWalletType.md +- Testing/Troubleshooting: + - Running and Creating Unit Tests: testing/UnitTests.md + - Managing Logging: testing/Logging.md + - ACA-Py Integration Tests: testing/INTEGRATION-TESTS.md + - Protocol Tracing: testing/AgentTracing.md + - Troubleshooting: testing/Troubleshooting.md +- Contributing: + - How to Contribute: CONTRIBUTING.md + - Maintainers: MAINTAINERS.md + - Hyperledger Code of Conduct: CODE_OF_CONDUCT.md + - Security Vulnerability Reporting: SECURITY.md + - Publishing an ACA-Py Release: PUBLISHING.md + - Updating the ACA-Py ReadTheDocs Site: UpdateRTD.md From c842a13e1d5507222c5f68659c6273a148f9ad75 Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Mon, 19 Feb 2024 12:33:59 -0800 Subject: [PATCH 32/69] Add last merged PR, some tweaks to the mkdocs process Signed-off-by: Stephen Curran --- CHANGELOG.md | 1 + mkdocs-requirements.txt | 6 +++--- overrides/README.md | 6 ++++++ 3 files changed, 10 insertions(+), 3 deletions(-) create mode 100644 overrides/README.md diff --git a/CHANGELOG.md b/CHANGELOG.md index 2d28aa53c7..91e081ac98 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -102,6 +102,7 @@ There are no breaking changes in 0.12.0rc1. 
- Update the ReadTheDocs config in case we do another 0.10.x release [\#2629](https://github.com/hyperledger/aries-cloudagent-python/pull/2629) [swcurran](https://github.com/swcurran) - Dependencies and Internal Updates + - Change middleware registration order [\#2796](https://github.com/hyperledger/aries-cloudagent-python/pull/2796) [PatStLouis](https://github.com/PatStLouis) - Bump pyld version to 2.0.4 [\#2795](https://github.com/hyperledger/aries-cloudagent-python/pull/2795) [PatStLouis](https://github.com/PatStLouis) - Revert profile inject [\#2789](https://github.com/hyperledger/aries-cloudagent-python/pull/2789) [jamshale](https://github.com/jamshale) - Move emit events to profile and delay sending until after commit [\#2760](https://github.com/hyperledger/aries-cloudagent-python/pull/2760) [ianco](https://github.com/ianco) diff --git a/mkdocs-requirements.txt b/mkdocs-requirements.txt index 734a4b2c1f..a64b578cf6 100644 --- a/mkdocs-requirements.txt +++ b/mkdocs-requirements.txt @@ -1,3 +1,3 @@ -mkdocs-material -mike -tzdata \ No newline at end of file + +mkdocs-material==9.5.10 +mike==2.0.0 diff --git a/overrides/README.md b/overrides/README.md new file mode 100644 index 0000000000..568e43f838 --- /dev/null +++ b/overrides/README.md @@ -0,0 +1,6 @@ +# Mkdocs Overrides + +This folder contains any overrides for the mkdocs docs publishing. Most notably, +the `base.html` file that puts a banner on the screen for all versions of the +docs other than the main branch. The `base.html` file is generated on publishing +the docs (in the publishing GitHub Action) -- and does not exist in the main branch. From 33a1c4887c191cdc7bd7cdc132423da5889ca7c5 Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Tue, 20 Feb 2024 06:50:55 -0800 Subject: [PATCH 33/69] 0.12.0rc1 Signed-off-by: Stephen Curran --- .github/workflows/publish-docs.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index 5ed82add33..82cbdd3554 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -43,11 +43,10 @@ jobs: [[ "${{ github.ref }}" == "refs/tags/"* ]] && ALIAS=$(echo $VERSION | sed -e 's/^v//') # Copy all of the root level md files into the docs folder for deployment, tweaking the relative paths for i in *.md; do sed -e "s#docs/#./#g" $i >docs/$i; done - # Create overrides folder, populate it for version, and then move to not apply if VERSION is main branch - mkdir overrides + # Populate overrides for the current version, and then remove to not apply if VERSION is main branch echo -e "{% extends "base.html" %}\n\n{% block outdated %}\n You are viewing the documentation for ACA-Py Release $VERSION.\n{% endblock %}" >overrides/base.html # If building from main, use latest as ALIAS - [ "$VERSION" == "main" ] && ALIAS=latest && mv overrides/base.html overrides/base.txt + [ "$VERSION" == "main" ] && ALIAS=latest && rm overrides/base.html echo $VERSION $ALIAS mike deploy --push --update-aliases $VERSION $ALIAS mike set-default latest From 96df9d0a637d8a9ef4c817be658ffe43c1b318fc Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Tue, 20 Feb 2024 06:52:49 -0800 Subject: [PATCH 34/69] Changlelog update Signed-off-by: Stephen Curran --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 91e081ac98..45c584ede3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -134,6 +134,7 @@ There are no breaking changes in 0.12.0rc1. 
- Update snyk workflow to execute on Pull Request [\#2658](https://github.com/hyperledger/aries-cloudagent-python/pull/2658) [usingtechnology](https://github.com/usingtechnology) - Release management pull requests + - 0.12.0rc1 [\#2800](https://github.com/hyperledger/aries-cloudagent-python/pull/2800) [swcurran](https://github.com/swcurran) - 0.12.0rc1 [\#2799](https://github.com/hyperledger/aries-cloudagent-python/pull/2799) [swcurran](https://github.com/swcurran) - 0.12.0rc0 [\#2732](https://github.com/hyperledger/aries-cloudagent-python/pull/2732) [swcurran](https://github.com/swcurran) From 208791565a015d07c83096b2114957a7d49b7d67 Mon Sep 17 00:00:00 2001 From: jamshale <31809382+jamshale@users.noreply.github.com> Date: Tue, 20 Feb 2024 12:08:08 -0800 Subject: [PATCH 35/69] Author subwallet setup automation (#2791) Signed-off-by: jamshale --- aries_cloudagent/multitenant/admin/routes.py | 48 +++++--- .../multitenant/admin/tests/test_routes.py | 48 ++++++-- .../protocols/didexchange/v1_0/manager.py | 4 +- .../didexchange/v1_0/tests/test_manager.py | 13 ++- .../endorse_transaction/v1_0/routes.py | 89 +-------------- aries_cloudagent/utils/endorsement_setup.py | 104 ++++++++++++++++++ .../utils/tests/test_endorsement_setup.py | 64 +++++++++++ 7 files changed, 253 insertions(+), 117 deletions(-) create mode 100644 aries_cloudagent/utils/endorsement_setup.py create mode 100644 aries_cloudagent/utils/tests/test_endorsement_setup.py diff --git a/aries_cloudagent/multitenant/admin/routes.py b/aries_cloudagent/multitenant/admin/routes.py index 2ebdc83800..d4c0546779 100644 --- a/aries_cloudagent/multitenant/admin/routes.py +++ b/aries_cloudagent/multitenant/admin/routes.py @@ -18,53 +18,62 @@ from ...messaging.valid import UUID4_EXAMPLE, JSONWebToken from ...multitenant.base import BaseMultitenantManager from ...storage.error import StorageError, StorageNotFoundError +from ...utils.endorsement_setup import attempt_auto_author_with_endorser_setup from ...wallet.error import WalletSettingsError from ...wallet.models.wallet_record import WalletRecord, WalletRecordSchema from ..error import WalletKeyMissingError ACAPY_LIFECYCLE_CONFIG_FLAG_MAP = { - "ACAPY_LOG_LEVEL": "log.level", - "ACAPY_INVITE_PUBLIC": "debug.invite_public", - "ACAPY_PUBLIC_INVITES": "public_invites", "ACAPY_AUTO_ACCEPT_INVITES": "debug.auto_accept_invites", "ACAPY_AUTO_ACCEPT_REQUESTS": "debug.auto_accept_requests", "ACAPY_AUTO_PING_CONNECTION": "auto_ping_connection", - "ACAPY_MONITOR_PING": "debug.monitor_ping", - "ACAPY_AUTO_RESPOND_MESSAGES": "debug.auto_respond_messages", + "ACAPY_AUTO_PROMOTE_AUTHOR_DID": "endorser.auto_promote_author_did", + "ACAPY_AUTO_REQUEST_ENDORSEMENT": "endorser.auto_request", "ACAPY_AUTO_RESPOND_CREDENTIAL_OFFER": "debug.auto_respond_credential_offer", "ACAPY_AUTO_RESPOND_CREDENTIAL_REQUEST": "debug.auto_respond_credential_request", + "ACAPY_AUTO_RESPOND_MESSAGES": "debug.auto_respond_messages", "ACAPY_AUTO_VERIFY_PRESENTATION": "debug.auto_verify_presentation", - "ACAPY_NOTIFY_REVOCATION": "revocation.notify", - "ACAPY_AUTO_REQUEST_ENDORSEMENT": "endorser.auto_request", "ACAPY_AUTO_WRITE_TRANSACTIONS": "endorser.auto_write", "ACAPY_CREATE_REVOCATION_TRANSACTIONS": "endorser.auto_create_rev_reg", - "ACAPY_ENDORSER_ROLE": "endorser.protocol_role", "ACAPY_EMIT_DID_PEER_2": "emit_did_peer_2", "ACAPY_EMIT_DID_PEER_4": "emit_did_peer_4", + "ACAPY_ENDORSER_ALIAS": "endorser.endorser_alias", + "ACAPY_ENDORSER_INVITATION": "endorser.endorser_invitation", + "ACAPY_ENDORSER_PUBLIC_DID": 
"endorser.endorser_public_did", + "ACAPY_ENDORSER_ROLE": "endorser.protocol_role", + "ACAPY_INVITE_PUBLIC": "debug.invite_public", + "ACAPY_LOG_LEVEL": "log.level", + "ACAPY_MONITOR_PING": "debug.monitor_ping", + "ACAPY_NOTIFY_REVOCATION": "revocation.notify", "ACAPY_PRESERVE_EXCHANGE_RECORDS": "preserve_exchange_records", + "ACAPY_PUBLIC_INVITES": "public_invites", "ACAPY_REQUESTS_THROUGH_PUBLIC_DID": "requests_through_public_did", } ACAPY_LIFECYCLE_CONFIG_FLAG_ARGS_MAP = { - "log-level": "log.level", - "invite-public": "debug.invite_public", - "public-invites": "public_invites", "auto-accept-invites": "debug.auto_accept_invites", "auto-accept-requests": "debug.auto_accept_requests", + "auto-create-revocation-transactions": "endorser.auto_create_rev_reg", "auto-ping-connection": "auto_ping_connection", - "monitor-ping": "debug.monitor_ping", - "auto-respond-messages": "debug.auto_respond_messages", + "auto-promote-author-did": "endorser.auto_promote_author_did", + "auto-request-endorsement": "endorser.auto_request", "auto-respond-credential-offer": "debug.auto_respond_credential_offer", "auto-respond-credential-request": "debug.auto_respond_credential_request", + "auto-respond-messages": "debug.auto_respond_messages", "auto-verify-presentation": "debug.auto_verify_presentation", - "notify-revocation": "revocation.notify", - "auto-request-endorsement": "endorser.auto_request", "auto-write-transactions": "endorser.auto_write", - "auto-create-revocation-transactions": "endorser.auto_create_rev_reg", - "endorser-protocol-role": "endorser.protocol_role", "emit-did-peer-2": "emit_did_peer_2", "emit-did-peer-4": "emit_did_peer_4", + "endorser-alias": "endorser.endorser_alias", + "endorser-invitation": "endorser.endorser_invitation", + "endorser-protocol-role": "endorser.protocol_role", + "endorser-public-did": "endorser.endorser_public_did", + "invite-public": "debug.invite_public", + "log-level": "log.level", + "monitor-ping": "debug.monitor_ping", + "notify-revocation": "revocation.notify", "preserve-exchange-records": "preserve_exchange_records", + "public-invites": "public_invites", "requests-through-public-did": "requests_through_public_did", } @@ -459,6 +468,11 @@ async def wallet_create(request: web.BaseRequest): ) token = await multitenant_mgr.create_auth_token(wallet_record, wallet_key) + + wallet_profile = await multitenant_mgr.get_wallet_profile( + context, wallet_record, extra_settings=settings + ) + await attempt_auto_author_with_endorser_setup(wallet_profile) except BaseError as err: raise web.HTTPBadRequest(reason=err.roll_up) from err diff --git a/aries_cloudagent/multitenant/admin/tests/test_routes.py b/aries_cloudagent/multitenant/admin/tests/test_routes.py index 2d24d902ee..cc549ade09 100644 --- a/aries_cloudagent/multitenant/admin/tests/test_routes.py +++ b/aries_cloudagent/multitenant/admin/tests/test_routes.py @@ -1,13 +1,15 @@ from unittest import IsolatedAsyncioTestCase -from aries_cloudagent.tests import mock + +import pytest from marshmallow.exceptions import ValidationError -from ...base import BaseMultitenantManager, MultitenantManagerError +from aries_cloudagent.tests import mock + from ....admin.request_context import AdminRequestContext -from ....wallet.models.wallet_record import WalletRecord from ....messaging.models.base import BaseModelError from ....storage.error import StorageError, StorageNotFoundError - +from ....wallet.models.wallet_record import WalletRecord +from ...base import BaseMultitenantManager, MultitenantManagerError from .. 
import routes as test_module @@ -139,6 +141,7 @@ async def test_wallets_list_query(self): } ) + @pytest.mark.asyncio(scope="module") async def test_wallet_create_tenant_settings(self): body = { "wallet_name": "test", @@ -156,6 +159,8 @@ async def test_wallet_create_tenant_settings(self): } self.request.json = mock.CoroutineMock(return_value=body) + test_module.attempt_auto_author_with_endorser_setup = mock.CoroutineMock() + with mock.patch.object(test_module.web, "json_response") as mock_response: wallet_mock = mock.MagicMock( serialize=mock.MagicMock( @@ -173,7 +178,10 @@ async def test_wallet_create_tenant_settings(self): self.mock_multitenant_mgr.create_auth_token = mock.CoroutineMock( return_value="test_token" ) - print(self.request["context"]) + self.mock_multitenant_mgr.get_wallet_profile = mock.CoroutineMock( + return_value=mock.MagicMock() + ) + await test_module.wallet_create(self.request) self.mock_multitenant_mgr.create_wallet.assert_called_once_with( @@ -195,6 +203,8 @@ async def test_wallet_create_tenant_settings(self): mock_response.assert_called_once_with( {**test_module.format_wallet_record(wallet_mock), "token": "test_token"} ) + assert self.mock_multitenant_mgr.get_wallet_profile.called + assert test_module.attempt_auto_author_with_endorser_setup.called async def test_wallet_create(self): body = { @@ -207,6 +217,7 @@ async def test_wallet_create(self): "wallet_dispatch_type": "base", } self.request.json = mock.CoroutineMock(return_value=body) + test_module.attempt_auto_author_with_endorser_setup = mock.CoroutineMock() with mock.patch.object(test_module.web, "json_response") as mock_response: wallet_mock = mock.MagicMock( @@ -225,7 +236,10 @@ async def test_wallet_create(self): self.mock_multitenant_mgr.create_auth_token = mock.CoroutineMock( return_value="test_token" ) - print(self.request["context"]) + self.mock_multitenant_mgr.get_wallet_profile = mock.CoroutineMock( + return_value=mock.MagicMock() + ) + await test_module.wallet_create(self.request) self.mock_multitenant_mgr.create_wallet.assert_called_once_with( @@ -242,8 +256,13 @@ async def test_wallet_create(self): wallet_mock, body["wallet_key"] ) mock_response.assert_called_once_with( - {**test_module.format_wallet_record(wallet_mock), "token": "test_token"} + { + **test_module.format_wallet_record(wallet_mock), + "token": "test_token", + } ) + assert self.mock_multitenant_mgr.get_wallet_profile.called + assert test_module.attempt_auto_author_with_endorser_setup.called async def test_wallet_create_x(self): body = {} @@ -277,6 +296,9 @@ async def test_wallet_create_optional_default_fields(self): return_value=mock.MagicMock() ) self.mock_multitenant_mgr.create_auth_token = mock.CoroutineMock() + self.mock_multitenant_mgr.get_wallet_profile = mock.CoroutineMock( + return_value=mock.MagicMock() + ) await test_module.wallet_create(self.request) self.mock_multitenant_mgr.create_wallet.assert_called_once_with( @@ -292,6 +314,7 @@ async def test_wallet_create_optional_default_fields(self): }, WalletRecord.MODE_MANAGED, ) + assert self.mock_multitenant_mgr.get_wallet_profile.called async def test_wallet_create_raw_key_derivation(self): body = { @@ -306,6 +329,9 @@ async def test_wallet_create_raw_key_derivation(self): return_value=mock.MagicMock() ) self.mock_multitenant_mgr.create_auth_token = mock.CoroutineMock() + self.mock_multitenant_mgr.get_wallet_profile = mock.CoroutineMock( + return_value=mock.MagicMock() + ) await test_module.wallet_create(self.request) 
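
# Illustrative only -- not part of this patch. A minimal controller-side sketch of how an
# author subwallet might be provisioned so that the attempt_auto_author_with_endorser_setup()
# call added to wallet_create() above can connect the new tenant to its endorser at creation
# time. Assumptions: an ACA-Py admin server at http://localhost:8021 with multitenancy
# enabled and no admin API key; the invitation URL and endorser DID are hypothetical
# placeholders; the auto-setup only proceeds when the invitation, alias and endorser public
# DID are all supplied, and depending on the base agent's endorser configuration additional
# settings may be needed for the tenant to be treated as an author.
import requests

payload = {
    "wallet_name": "author-tenant",
    "wallet_key": "insecure-demo-key",
    "wallet_type": "askar",
    "label": "Author Tenant",
    "extra_settings": {
        "ACAPY_ENDORSER_ROLE": "author",
        "ACAPY_ENDORSER_INVITATION": "http://endorser:9030?oob=...",  # hypothetical
        "ACAPY_ENDORSER_ALIAS": "endorser",
        "ACAPY_ENDORSER_PUBLIC_DID": "<endorser-public-did>",  # hypothetical
        "ACAPY_AUTO_REQUEST_ENDORSEMENT": True,
        "ACAPY_AUTO_WRITE_TRANSACTIONS": True,
    },
}
response = requests.post("http://localhost:8021/multitenancy/wallet", json=payload)
response.raise_for_status()
token = response.json()["token"]  # bearer token for subsequent tenant-scoped admin calls
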
self.mock_multitenant_mgr.create_wallet.assert_called_once_with( @@ -319,6 +345,7 @@ async def test_wallet_create_raw_key_derivation(self): }, WalletRecord.MODE_MANAGED, ) + assert self.mock_multitenant_mgr.get_wallet_profile.called async def test_wallet_update_tenant_settings(self): self.request.match_info = {"wallet_id": "test-wallet-id"} @@ -651,6 +678,7 @@ async def test_wallet_create_token_x(self): ) await test_module.wallet_create_token(self.request) + @pytest.mark.asyncio(scope="module") async def test_wallet_remove_managed(self): self.request.has_body = False self.request.match_info = {"wallet_id": "dummy"} @@ -662,12 +690,13 @@ async def test_wallet_remove_managed(self): ): self.mock_multitenant_mgr.remove_wallet = mock.CoroutineMock() - await test_module.wallet_remove(self.request) + result = await test_module.wallet_remove(self.request) self.mock_multitenant_mgr.remove_wallet.assert_called_once_with( "dummy", None ) mock_response.assert_called_once_with({}) + assert result == mock_response.return_value async def test_wallet_remove_unmanaged(self): self.request.match_info = {"wallet_id": "dummy"} @@ -680,12 +709,13 @@ async def test_wallet_remove_unmanaged(self): ): self.mock_multitenant_mgr.remove_wallet = mock.CoroutineMock() - await test_module.wallet_remove(self.request) + result = await test_module.wallet_remove(self.request) self.mock_multitenant_mgr.remove_wallet.assert_called_once_with( "dummy", "dummy_key" ) mock_response.assert_called_once_with({}) + assert result == mock_response.return_value async def test_wallet_remove_managed_wallet_key_provided_throws(self): self.request.match_info = {"wallet_id": "dummy"} diff --git a/aries_cloudagent/protocols/didexchange/v1_0/manager.py b/aries_cloudagent/protocols/didexchange/v1_0/manager.py index 60bdbc8808..65d8051b06 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/manager.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/manager.py @@ -6,6 +6,7 @@ from did_peer_4 import LONG_PATTERN, long_to_short +from ....admin.server import AdminResponder from ....connections.base_manager import BaseConnectionManager from ....connections.models.conn_record import ConnRecord from ....connections.models.connection_target import ConnectionTarget @@ -150,7 +151,8 @@ async def receive_invitation( if conn_rec.accept == ConnRecord.ACCEPT_AUTO: request = await self.create_request(conn_rec, mediation_id=mediation_id) - responder = self.profile.inject_or(BaseResponder) + base_responder = self.profile.inject(BaseResponder) + responder = AdminResponder(self.profile, base_responder.send_fn) if responder: await responder.send_reply( request, diff --git a/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py index 1c08be393d..5f70dbc2a8 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py @@ -5,6 +5,7 @@ from aries_cloudagent.tests import mock +from .....admin.server import AdminResponder from .....cache.base import BaseCache from .....cache.in_memory import InMemoryCache from .....connections.models.conn_record import ConnRecord @@ -72,7 +73,7 @@ def make_did_doc(self, did, verkey): class TestDidExchangeManager(IsolatedAsyncioTestCase, TestConfig): async def asyncSetUp(self): self.responder = MockResponder() - + self.responder.send_fn = mock.CoroutineMock() self.oob_mock = mock.MagicMock( clean_finished_oob_record=mock.CoroutineMock(return_value=None) ) @@ -181,7 
+182,9 @@ async def test_receive_invitation(self): test_module, "AttachDecorator", autospec=True ) as mock_attach_deco, mock.patch.object( self.multitenant_mgr, "get_default_mediator" - ) as mock_get_default_mediator: + ) as mock_get_default_mediator, mock.patch.object( + AdminResponder, "send_reply" + ) as mock_send_reply: mock_get_default_mediator.return_value = mediation_record invi_rec = await self.oob_manager.create_invitation( my_endpoint="testendpoint", @@ -195,6 +198,7 @@ async def test_receive_invitation(self): ) invitee_record = await self.manager.receive_invitation(invi_msg) assert invitee_record.state == ConnRecord.State.REQUEST.rfc23 + assert mock_send_reply.called async def test_receive_invitation_oob_public_did(self): async with self.profile.session() as session: @@ -211,7 +215,9 @@ async def test_receive_invitation_oob_public_did(self): self.multitenant_mgr, "get_default_mediator" ) as mock_get_default_mediator, mock.patch.object( self.manager, "resolve_connection_targets", mock.CoroutineMock() - ) as mock_resolve_targets: + ) as mock_resolve_targets, mock.patch.object( + AdminResponder, "send_reply" + ) as mock_send_reply: mock_resolve_targets.return_value = [ mock.MagicMock(recipient_keys=["test"]) ] @@ -231,6 +237,7 @@ async def test_receive_invitation_oob_public_did(self): invi_msg, their_public_did=public_did_info.did ) assert invitee_record.state == ConnRecord.State.REQUEST.rfc23 + assert mock_send_reply.called async def test_receive_invitation_no_auto_accept(self): async with self.profile.session() as session: diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py index 1013320117..5631c161e2 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py @@ -10,7 +10,6 @@ request_schema, response_schema, ) - from marshmallow import fields, validate from ....admin.request_context import AdminRequestContext @@ -23,17 +22,11 @@ from ....messaging.models.base import BaseModelError from ....messaging.models.openapi import OpenAPISchema from ....messaging.valid import UUID4_EXAMPLE -from ....protocols.connections.v1_0.manager import ConnectionManager -from ....protocols.connections.v1_0.messages.connection_invitation import ( - ConnectionInvitation, -) -from ....protocols.out_of_band.v1_0.manager import OutOfBandManager -from ....protocols.out_of_band.v1_0.messages.invitation import InvitationMessage from ....storage.error import StorageError, StorageNotFoundError +from ....utils.endorsement_setup import attempt_auto_author_with_endorser_setup from .manager import TransactionManager, TransactionManagerError from .models.transaction_record import TransactionRecord, TransactionRecordSchema from .transaction_jobs import TransactionJob -from .util import get_endorser_connection_id, is_author_role LOGGER = logging.getLogger(__name__) @@ -707,85 +700,7 @@ def register_events(event_bus: EventBus): async def on_startup_event(profile: Profile, event: Event): """Handle any events we need to support.""" - # auto setup is only for authors - if not is_author_role(profile): - return - - # see if we have an invitation to connect to the endorser - endorser_invitation = profile.settings.get_value("endorser.endorser_invitation") - if not endorser_invitation: - # no invitation, we can't connect automatically - return - - # see if we need to initiate an endorser connection - endorser_alias = 
profile.settings.get_value("endorser.endorser_alias") - if not endorser_alias: - # no alias is specified for the endorser connection - # note that alias is required if invitation is specified - return - - connection_id = await get_endorser_connection_id(profile) - if connection_id: - # there is already a connection - return - - endorser_did = profile.settings.get_value("endorser.endorser_public_did") - if not endorser_did: - # no DID, we can connect but we can't properly setup the connection metadata - # note that DID is required if invitation is specified - return - - try: - # OK, we are an author, we have no endorser connection but we have enough info - # to automatically initiate the connection - invite = InvitationMessage.from_url(endorser_invitation) - if invite: - oob_mgr = OutOfBandManager(profile) - oob_record = await oob_mgr.receive_invitation( - invitation=invite, - auto_accept=True, - alias=endorser_alias, - ) - async with profile.session() as session: - conn_record = await ConnRecord.retrieve_by_id( - session, oob_record.connection_id - ) - else: - invite = ConnectionInvitation.from_url(endorser_invitation) - if invite: - conn_mgr = ConnectionManager(profile) - conn_record = await conn_mgr.receive_invitation( - invitation=invite, - auto_accept=True, - alias=endorser_alias, - ) - else: - raise Exception( - "Failed to establish endorser connection, invalid " - "invitation format." - ) - - # configure the connection role and info (don't need to wait for the connection) - transaction_mgr = TransactionManager(profile) - await transaction_mgr.set_transaction_my_job( - record=conn_record, - transaction_my_job=TransactionJob.TRANSACTION_AUTHOR.name, - ) - - async with profile.session() as session: - value = await conn_record.metadata_get(session, "endorser_info") - if value: - value["endorser_did"] = endorser_did - value["endorser_name"] = endorser_alias - else: - value = {"endorser_did": endorser_did, "endorser_name": endorser_alias} - await conn_record.metadata_set(session, key="endorser_info", value=value) - - except Exception: - # log the error, but continue - LOGGER.exception( - "Error accepting endorser invitation/configuring endorser connection: %s", - ) + await attempt_auto_author_with_endorser_setup(profile) async def on_shutdown_event(profile: Profile, event: Event): diff --git a/aries_cloudagent/utils/endorsement_setup.py b/aries_cloudagent/utils/endorsement_setup.py new file mode 100644 index 0000000000..69e7facedb --- /dev/null +++ b/aries_cloudagent/utils/endorsement_setup.py @@ -0,0 +1,104 @@ +"""Common endorsement utilities.""" + +import logging + +from ..connections.models.conn_record import ConnRecord +from ..core.profile import Profile +from ..protocols.connections.v1_0.manager import ConnectionManager +from ..protocols.connections.v1_0.messages.connection_invitation import ( + ConnectionInvitation, +) +from ..protocols.endorse_transaction.v1_0.manager import TransactionManager +from ..protocols.endorse_transaction.v1_0.transaction_jobs import TransactionJob +from ..protocols.endorse_transaction.v1_0.util import ( + get_endorser_connection_id, + is_author_role, +) +from ..protocols.out_of_band.v1_0.manager import OutOfBandManager +from ..protocols.out_of_band.v1_0.messages.invitation import InvitationMessage + +LOGGER = logging.getLogger(__name__) + + +async def attempt_auto_author_with_endorser_setup(profile: Profile): + """Automatically setup the author's endorser connection if possible.""" + + if not is_author_role(profile): + return + + endorser_invitation = 
profile.settings.get_value("endorser.endorser_invitation") + if not endorser_invitation: + LOGGER.info("No endorser invitation, can't connect automatically.") + return + + endorser_alias = profile.settings.get_value("endorser.endorser_alias") + if not endorser_alias: + LOGGER.info("No endorser alias, alias is required if invitation is specified.") + return + + connection_id = await get_endorser_connection_id(profile) + if connection_id: + LOGGER.info("Connected to endorser from previous connection.") + return + + endorser_did = profile.settings.get_value("endorser.endorser_public_did") + if not endorser_did: + LOGGER.info( + "No endorser DID, can connect, but can't setup connection metadata." + ) + return + + try: + # OK, we are an author, we have no endorser connection but we have enough info + # to automatically initiate the connection + invite = InvitationMessage.from_url(endorser_invitation) + if invite: + oob_mgr = OutOfBandManager(profile) + oob_record = await oob_mgr.receive_invitation( + invitation=invite, + auto_accept=True, + alias=endorser_alias, + ) + async with profile.session() as session: + conn_record = await ConnRecord.retrieve_by_id( + session, oob_record.connection_id + ) + else: + invite = ConnectionInvitation.from_url(endorser_invitation) + if invite: + conn_mgr = ConnectionManager(profile) + conn_record = await conn_mgr.receive_invitation( + invitation=invite, + auto_accept=True, + alias=endorser_alias, + ) + else: + raise Exception( + "Failed to establish endorser connection, invalid " + "invitation format." + ) + + # configure the connection role and info (don't need to wait for the connection) + transaction_mgr = TransactionManager(profile) + await transaction_mgr.set_transaction_my_job( + record=conn_record, + transaction_my_job=TransactionJob.TRANSACTION_AUTHOR.name, + ) + + async with profile.session() as session: + value = await conn_record.metadata_get(session, "endorser_info") + if value: + value["endorser_did"] = endorser_did + value["endorser_name"] = endorser_alias + else: + value = {"endorser_did": endorser_did, "endorser_name": endorser_alias} + await conn_record.metadata_set(session, key="endorser_info", value=value) + + LOGGER.info( + "Successfully connected to endorser from invitation, and setup connection metadata." # noqa: E501 + ) + + except Exception: + LOGGER.info( + "Error accepting endorser invitation/configuring endorser connection" + ) diff --git a/aries_cloudagent/utils/tests/test_endorsement_setup.py b/aries_cloudagent/utils/tests/test_endorsement_setup.py new file mode 100644 index 0000000000..3e4f17de5b --- /dev/null +++ b/aries_cloudagent/utils/tests/test_endorsement_setup.py @@ -0,0 +1,64 @@ +from unittest import IsolatedAsyncioTestCase + +from aries_cloudagent.tests import mock + +from ...connections.models.conn_record import ConnRecord +from ...core.in_memory.profile import InMemoryProfile +from .. 
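
# Illustrative only -- not part of this patch. An endorser invitation such as the
# endorser.endorser_invitation value consumed above is an out-of-band invitation URL;
# InvitationMessage.from_url() performs essentially the decoding below (before
# deserializing the payload into a message object), recovering the invitation JSON
# (label, handshake protocol, recipient keys, service endpoint). A small stdlib-only
# sketch for inspecting such a URL:
import base64
import json
from urllib.parse import parse_qs, urlparse

def decode_oob_invitation(invitation_url: str) -> dict:
    oob = parse_qs(urlparse(invitation_url).query)["oob"][0]
    padded = oob + "=" * (-len(oob) % 4)  # restore any stripped base64 padding
    return json.loads(base64.urlsafe_b64decode(padded))
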
import endorsement_setup +from ..endorsement_setup import attempt_auto_author_with_endorser_setup + +mock_invitation = "http://localhost:9030?oob=eyJAdHlwZSI6ICJodHRwczovL2RpZGNvbW0ub3JnL291dC1vZi1iYW5kLzEuMS9pbnZpdGF0aW9uIiwgIkBpZCI6ICI2MWU1MmYzZS1kNTliLTQ3OWYtYmYwNC04NjJlOTk1MmM4MDYiLCAibGFiZWwiOiAiZW5kb3JzZXIiLCAiaGFuZHNoYWtlX3Byb3RvY29scyI6IFsiaHR0cHM6Ly9kaWRjb21tLm9yZy9kaWRleGNoYW5nZS8xLjAiXSwgInNlcnZpY2VzIjogW3siaWQiOiAiI2lubGluZSIsICJ0eXBlIjogImRpZC1jb21tdW5pY2F0aW9uIiwgInJlY2lwaWVudEtleXMiOiBbImRpZDprZXk6ejZNa2VkRDMyZlZmOG5ReG5SS2QzUmQ5S1hZQnVETEJiOHUyM1JWMm1ReFlpanR2I3o2TWtlZEQzMmZWZjhuUXhuUktkM1JkOUtYWUJ1RExCYjh1MjNSVjJtUXhZaWp0diJdLCAic2VydmljZUVuZHBvaW50IjogImh0dHA6Ly9sb2NhbGhvc3Q6OTAzMCJ9XX0=" + + +class MockConnRecord: + connection_id = "test-connection-id" + + +class TestEndorsementSetupUtil(IsolatedAsyncioTestCase): + def setUp(self) -> None: + self.profile = InMemoryProfile.test_profile() + + @mock.patch.object(endorsement_setup.LOGGER, "info", return_value=mock.MagicMock()) + async def test_not_enough_configs_for_connection(self, mock_logger): + await endorsement_setup.attempt_auto_author_with_endorser_setup(self.profile) + + # No invitation + self.profile.settings.set_value("endorser.author", True) + await endorsement_setup.attempt_auto_author_with_endorser_setup(self.profile) + + # No endorser alias + self.profile.settings.set_value("endorser.endorser_invitation", mock_invitation) + await endorsement_setup.attempt_auto_author_with_endorser_setup(self.profile) + + # No endorser DID + self.profile.settings.set_value("endorser.endorser_alias", "test-alias") + await endorsement_setup.attempt_auto_author_with_endorser_setup(self.profile) + + assert mock_logger.call_count == 3 + for call in mock_logger.call_args_list: + assert "Error accepting endorser invitation" not in call[0][0] + + @mock.patch.object(endorsement_setup.LOGGER, "info", return_value=mock.MagicMock()) + @mock.patch.object(endorsement_setup, "OutOfBandManager") + @mock.patch.object( + ConnRecord, + "retrieve_by_id", + return_value=ConnRecord(connection_id="test-connection-id"), + ) + async def test_create_connection_with_valid_invitation( + self, mock_conn_record, mock_oob_manager, mock_logger + ): + mock_oob_manager.return_value.receive_invitation = mock.CoroutineMock( + return_value=MockConnRecord() + ) + self.profile.settings.set_value("endorser.author", True) + self.profile.settings.set_value("endorser.endorser_invitation", mock_invitation) + self.profile.settings.set_value("endorser.endorser_alias", "test-alias") + self.profile.settings.set_value("endorser.endorser_public_did", "test-did") + + await attempt_auto_author_with_endorser_setup(self.profile) + + for call in mock_logger.call_args_list: + assert "Error accepting endorser invitation" not in call[0][0] + + assert mock_conn_record.called From c4f320f9da51cfde66b934f527466768703d1201 Mon Sep 17 00:00:00 2001 From: jamshale Date: Tue, 20 Feb 2024 18:02:58 +0000 Subject: [PATCH 36/69] Add anoncreds multitenant endorsement integration tests Signed-off-by: jamshale --- demo/features/0586-sign-transaction.feature | 2 ++ 1 file changed, 2 insertions(+) diff --git a/demo/features/0586-sign-transaction.feature b/demo/features/0586-sign-transaction.feature index 06bea76d3b..19e40c315e 100644 --- a/demo/features/0586-sign-transaction.feature +++ b/demo/features/0586-sign-transaction.feature @@ -39,6 +39,7 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions | --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | anoncreds-testing | | 
--wallet-type askar-anoncreds | | driverslicense | | | --wallet-type askar-anoncreds | anoncreds-testing | + | | --wallet-type askar-anoncreds --multitenant | anoncreds-testing | @T001.1-RFC0586 @GHA @@ -234,3 +235,4 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | + | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation --multitanant --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | From f47719d58a56642e8247b077347725f5d1f186eb Mon Sep 17 00:00:00 2001 From: jamshale Date: Tue, 20 Feb 2024 21:29:30 +0000 Subject: [PATCH 37/69] Get and create anoncreds profile when using anoncreds subwallet Signed-off-by: jamshale --- .../multitenant/askar_profile_manager.py | 9 +++++ .../tests/test_askar_profile_manager.py | 36 +++++++++++++++++++ 2 files changed, 45 insertions(+) diff --git a/aries_cloudagent/multitenant/askar_profile_manager.py b/aries_cloudagent/multitenant/askar_profile_manager.py index 93f2456609..22a118cf24 100644 --- a/aries_cloudagent/multitenant/askar_profile_manager.py +++ b/aries_cloudagent/multitenant/askar_profile_manager.py @@ -2,6 +2,7 @@ from typing import Iterable, Optional, cast +from ..askar.profile_anon import AskarAnoncredsProfile from ..askar.profile import AskarProfile from ..config.injection_context import InjectionContext from ..config.wallet import wallet_config @@ -104,6 +105,14 @@ async def get_wallet_profile( assert self._multitenant_profile.opened + # return anoncreds profile if explicitly set as wallet type + if profile_context.settings.get("wallet.type") == "askar-anoncreds": + return AskarAnoncredsProfile( + self._multitenant_profile.opened, + profile_context, + profile_id=wallet_record.wallet_id, + ) + return AskarProfile( self._multitenant_profile.opened, profile_context, diff --git a/aries_cloudagent/multitenant/tests/test_askar_profile_manager.py b/aries_cloudagent/multitenant/tests/test_askar_profile_manager.py index 2070f835ab..30892c1b2b 100644 --- a/aries_cloudagent/multitenant/tests/test_askar_profile_manager.py +++ b/aries_cloudagent/multitenant/tests/test_askar_profile_manager.py @@ -99,6 +99,42 @@ def side_effect(context, provision): == wallet_record.wallet_id ) + async def test_get_anoncreds_wallet_profile_should_open_store_and_return_anoncreds_profile( + self, + ): + askar_profile_mock_name = "AskarProfile" + wallet_record = WalletRecord( + wallet_id="test", + settings={ + "wallet.recreate": True, + "wallet.seed": "test_seed", + "wallet.name": "test_name", + "wallet.type": "askar-anoncreds", + "wallet.rekey": "test_rekey", + }, + ) + + with mock.patch( + "aries_cloudagent.multitenant.askar_profile_manager.wallet_config" + ) as wallet_config, mock.patch( + "aries_cloudagent.multitenant.askar_profile_manager.AskarAnoncredsProfile", + ) as AskarAnoncredsProfile: + sub_wallet_profile_context = InjectionContext() + sub_wallet_profile = AskarAnoncredsProfile(None, None) + sub_wallet_profile.context.copy.return_value = sub_wallet_profile_context + + def side_effect(context, provision): + sub_wallet_profile.name = askar_profile_mock_name + return sub_wallet_profile, None + + wallet_config.side_effect = side_effect + + await self.manager.get_wallet_profile(self.profile.context, wallet_record) + + 
AskarAnoncredsProfile.assert_called_with( + sub_wallet_profile.opened, sub_wallet_profile_context, profile_id="test" + ) + async def test_get_wallet_profile_should_create_profile(self): wallet_record = WalletRecord(wallet_id="test", settings={}) create_profile_stub = asyncio.Future() From 63aac4db96804019458ed62e6989420d14a599af Mon Sep 17 00:00:00 2001 From: jamshale Date: Tue, 20 Feb 2024 22:45:45 +0000 Subject: [PATCH 38/69] Fix spelling / Reduce GHA endorsement tests slightly Signed-off-by: jamshale --- demo/features/0586-sign-transaction.feature | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/demo/features/0586-sign-transaction.feature b/demo/features/0586-sign-transaction.feature index 19e40c315e..3a57b76e08 100644 --- a/demo/features/0586-sign-transaction.feature +++ b/demo/features/0586-sign-transaction.feature @@ -36,13 +36,11 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions @WalletType_Askar_AnonCreds @GHA Examples: | Acme_capabilities | Bob_capabilities | Schema_name | - | --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | anoncreds-testing | | --wallet-type askar-anoncreds | | driverslicense | | | --wallet-type askar-anoncreds | anoncreds-testing | - | | --wallet-type askar-anoncreds --multitenant | anoncreds-testing | - @T001.1-RFC0586 @GHA + @T001.1-RFC0586 Scenario Outline: endorse a transaction and write to the ledger Given we have "2" agents | name | role | capabilities | @@ -121,7 +119,7 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --revocation --public-did --did-exchange | --revocation --did-exchange --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | - @T002.1-RFC0586 @GHA + @T002.1-RFC0586 Scenario Outline: endorse a schema and cred def transaction, write to the ledger, issue and revoke a credential, manually invoking each endorsement endpoint Given we have "2" agents | name | role | capabilities | @@ -235,4 +233,4 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | - | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation --multitanant --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | + | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation --multitenant --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | From f28b73236f287b92f3f0b141f44a5f1060c7c91a Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Wed, 21 Feb 2024 10:53:31 -0800 Subject: [PATCH 39/69] Update publish-docs to operate on main and on branches prefixed with docs-v Signed-off-by: Stephen Curran --- .github/workflows/publish-docs.yml | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index 82cbdd3554..708fe60fa9 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -2,13 +2,13 @@ name: publish-docs on: push: - # Publish `main` as latest + # Publish `main` as latest, and when pushes are done to branches with "v-doc" prefix branches: - main + - docs-v* - # Publish `v1.2.3` tags as releases - tags: - - v* + 
create: + # Publish any `docs-v` branches -- check below to not run on other created branches permissions: contents: write @@ -39,13 +39,15 @@ jobs: # Strip git ref prefix from version echo "${{ github.ref }}" VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,') - # Strip "v" prefix from tag name - [[ "${{ github.ref }}" == "refs/tags/"* ]] && ALIAS=$(echo $VERSION | sed -e 's/^v//') + # If this is for a branch other than main or one starting with "docs-v" then exit happily + [[ "$VERSION" != "main" && "$VERSION" != "docs-v"* ]] && echo Not a docs branch...exiting && exit 0 + # Strip "docs-v" prefix from branch name + [[ "$VERSION" == "docs-v"* ]] && ALIAS=$(echo $VERSION | sed -e 's/^docs-v//') # Copy all of the root level md files into the docs folder for deployment, tweaking the relative paths for i in *.md; do sed -e "s#docs/#./#g" $i >docs/$i; done # Populate overrides for the current version, and then remove to not apply if VERSION is main branch echo -e "{% extends "base.html" %}\n\n{% block outdated %}\n You are viewing the documentation for ACA-Py Release $VERSION.\n{% endblock %}" >overrides/base.html - # If building from main, use latest as ALIAS + # If building from main, use latest as ALIAS and remove the base.html override [ "$VERSION" == "main" ] && ALIAS=latest && rm overrides/base.html echo $VERSION $ALIAS mike deploy --push --update-aliases $VERSION $ALIAS From bc48944ceab6d8925e81c8888fdab22fafb069a0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 20:50:04 +0000 Subject: [PATCH 40/69] chore(deps): Bump cryptography from 42.0.3 to 42.0.4 Bumps [cryptography](https://github.com/pyca/cryptography) from 42.0.3 to 42.0.4. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/42.0.3...42.0.4) --- updated-dependencies: - dependency-name: cryptography dependency-type: indirect ... Signed-off-by: dependabot[bot] --- poetry.lock | 77 ++++++++++++++++++++++++++++++----------------------- 1 file changed, 44 insertions(+), 33 deletions(-) diff --git a/poetry.lock b/poetry.lock index e40bdffbfb..10b2374bb4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -619,43 +619,43 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.3" +version = "42.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
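
# Illustrative only -- not part of the workflow. A plain-Python restatement of the
# version/alias rules the publish-docs job applies before running
# "mike deploy --push --update-aliases <version> <alias>" and "mike set-default latest":
# the main branch publishes as "main" with the alias "latest" and no outdated-version
# banner, while a branch such as "docs-v0.12.0" publishes as "0.12.0" with a banner
# identifying that release.
def docs_version_and_alias(git_ref: str):
    version = git_ref.rsplit("/", 1)[-1]  # "refs/heads/docs-v0.12.0" -> "docs-v0.12.0"
    alias = version
    show_banner = True
    if version.startswith("docs-v"):
        version = alias = version[len("docs-v"):]  # strip the "docs-v" prefix
    if version == "main":
        alias = "latest"
        show_banner = False  # the banner override is removed when publishing main
    return version, alias, show_banner

assert docs_version_and_alias("refs/heads/main") == ("main", "latest", False)
assert docs_version_and_alias("refs/heads/docs-v0.12.0") == ("0.12.0", "0.12.0", True)
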
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:de5086cd475d67113ccb6f9fae6d8fe3ac54a4f9238fd08bfdb07b03d791ff0a"}, - {file = "cryptography-42.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:935cca25d35dda9e7bd46a24831dfd255307c55a07ff38fd1a92119cffc34857"}, - {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20100c22b298c9eaebe4f0b9032ea97186ac2555f426c3e70670f2517989543b"}, - {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eb6368d5327d6455f20327fb6159b97538820355ec00f8cc9464d617caecead"}, - {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:39d5c93e95bcbc4c06313fc6a500cee414ee39b616b55320c1904760ad686938"}, - {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d96ea47ce6d0055d5b97e761d37b4e84195485cb5a38401be341fabf23bc32a"}, - {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d1998e545081da0ab276bcb4b33cce85f775adb86a516e8f55b3dac87f469548"}, - {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93fbee08c48e63d5d1b39ab56fd3fdd02e6c2431c3da0f4edaf54954744c718f"}, - {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:90147dad8c22d64b2ff7331f8d4cddfdc3ee93e4879796f837bdbb2a0b141e0c"}, - {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4dcab7c25e48fc09a73c3e463d09ac902a932a0f8d0c568238b3696d06bf377b"}, - {file = "cryptography-42.0.3-cp37-abi3-win32.whl", hash = "sha256:1e935c2900fb53d31f491c0de04f41110351377be19d83d908c1fd502ae8daa5"}, - {file = "cryptography-42.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:762f3771ae40e111d78d77cbe9c1035e886ac04a234d3ee0856bf4ecb3749d54"}, - {file = "cryptography-42.0.3-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3ec384058b642f7fb7e7bff9664030011ed1af8f852540c76a1317a9dd0d20"}, - {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35772a6cffd1f59b85cb670f12faba05513446f80352fe811689b4e439b5d89e"}, - {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04859aa7f12c2b5f7e22d25198ddd537391f1695df7057c8700f71f26f47a129"}, - {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c3d1f5a1d403a8e640fa0887e9f7087331abb3f33b0f2207d2cc7f213e4a864c"}, - {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df34312149b495d9d03492ce97471234fd9037aa5ba217c2a6ea890e9166f151"}, - {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:de4ae486041878dc46e571a4c70ba337ed5233a1344c14a0790c4c4be4bbb8b4"}, - {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0fab2a5c479b360e5e0ea9f654bcebb535e3aa1e493a715b13244f4e07ea8eec"}, - {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25b09b73db78facdfd7dd0fa77a3f19e94896197c86e9f6dc16bce7b37a96504"}, - {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d5cf11bc7f0b71fb71af26af396c83dfd3f6eed56d4b6ef95d57867bf1e4ba65"}, - {file = "cryptography-42.0.3-cp39-abi3-win32.whl", hash = "sha256:0fea01527d4fb22ffe38cd98951c9044400f6eff4788cf52ae116e27d30a1ba3"}, - {file = "cryptography-42.0.3-cp39-abi3-win_amd64.whl", hash = 
"sha256:2619487f37da18d6826e27854a7f9d4d013c51eafb066c80d09c63cf24505306"}, - {file = "cryptography-42.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ead69ba488f806fe1b1b4050febafdbf206b81fa476126f3e16110c818bac396"}, - {file = "cryptography-42.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:20180da1b508f4aefc101cebc14c57043a02b355d1a652b6e8e537967f1e1b46"}, - {file = "cryptography-42.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fbf0f3f0fac7c089308bd771d2c6c7b7d53ae909dce1db52d8e921f6c19bb3a"}, - {file = "cryptography-42.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c23f03cfd7d9826cdcbad7850de67e18b4654179e01fe9bc623d37c2638eb4ef"}, - {file = "cryptography-42.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db0480ffbfb1193ac4e1e88239f31314fe4c6cdcf9c0b8712b55414afbf80db4"}, - {file = "cryptography-42.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:6c25e1e9c2ce682d01fc5e2dde6598f7313027343bd14f4049b82ad0402e52cd"}, - {file = "cryptography-42.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9541c69c62d7446539f2c1c06d7046aef822940d248fa4b8962ff0302862cc1f"}, - {file = "cryptography-42.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b797099d221df7cce5ff2a1d272761d1554ddf9a987d3e11f6459b38cd300fd"}, - {file = "cryptography-42.0.3.tar.gz", hash = "sha256:069d2ce9be5526a44093a0991c450fe9906cdf069e0e7cd67d9dee49a62b9ebe"}, + {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449"}, + {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b"}, + {file = "cryptography-42.0.4-cp37-abi3-win32.whl", hash = "sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925"}, + {file = "cryptography-42.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923"}, + {file = "cryptography-42.0.4-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0"}, + {file = "cryptography-42.0.4-cp39-abi3-win32.whl", hash = "sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129"}, + {file = "cryptography-42.0.4-cp39-abi3-win_amd64.whl", hash = "sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660"}, + {file = "cryptography-42.0.4.tar.gz", hash = "sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb"}, ] [package.dependencies] @@ -2177,6 +2177,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = 
"PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2184,8 +2185,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2202,6 +2211,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", 
hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2209,6 +2219,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, From 773c08aa1d46db9afe7b9c9a3f5194bcdbf6c25b Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Wed, 21 Feb 2024 14:02:24 -0800 Subject: [PATCH 41/69] GHActions tweak Signed-off-by: Stephen Curran --- .github/workflows/publish-docs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index 708fe60fa9..be7d5de0ca 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -46,7 +46,7 @@ jobs: # Copy all of the root level md files into the docs folder for deployment, tweaking the relative paths for i in *.md; do sed -e "s#docs/#./#g" $i >docs/$i; done # Populate overrides for the current version, and then remove to not apply if VERSION is main branch - echo -e "{% extends "base.html" %}\n\n{% block outdated %}\n You are viewing the documentation for ACA-Py Release $VERSION.\n{% endblock %}" >overrides/base.html + echo -e "{% extends "base.html" %}\n\n{% block outdated %}\n You are viewing the documentation for ACA-Py Release $VERSION.\n{% endblock %}" >overrides/main.html # If building from main, use latest as ALIAS and remove the base.html override [ "$VERSION" == "main" ] && ALIAS=latest && rm overrides/base.html echo $VERSION $ALIAS From 5b2c9076df29bcc288384b57de08d971c2e986a8 Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Thu, 22 Feb 2024 06:55:41 -0800 Subject: [PATCH 42/69] More updates to get docs publishing Signed-off-by: Stephen Curran --- .github/workflows/publish-docs.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index be7d5de0ca..4da56d8a58 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -46,9 +46,10 @@ jobs: # Copy all of the root level md files into the docs folder for deployment, tweaking the relative paths for i in *.md; do sed -e "s#docs/#./#g" $i >docs/$i; done # Populate overrides for the current version, and then remove to not apply if VERSION is main branch - echo -e "{% extends "base.html" %}\n\n{% block outdated %}\n You are 
viewing the documentation for ACA-Py Release $VERSION.\n{% endblock %}" >overrides/main.html - # If building from main, use latest as ALIAS and remove the base.html override - [ "$VERSION" == "main" ] && ALIAS=latest && rm overrides/base.html + OVERRIDE=overrides/main.html + echo -e "{% extends \"base.html\" %}\n\n{% block outdated %}\n You are viewing the documentation for ACA-Py Release $ALIAS.\n{% endblock %}" >$OVERRIDE + # If building from main, use latest as ALIAS and remove the override + [ "$VERSION" == "main" ] && ALIAS=latest && rm $OVERRIDE echo $VERSION $ALIAS mike deploy --push --update-aliases $VERSION $ALIAS mike set-default latest From f866ff31b99ba27727e3e1b94692fb0c00d14b2a Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Thu, 22 Feb 2024 11:29:54 -0800 Subject: [PATCH 43/69] Eliminate the double workflow event Signed-off-by: Stephen Curran --- .github/workflows/publish-docs.yml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index 4da56d8a58..e22d7763ad 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -7,9 +7,6 @@ on: - main - docs-v* - create: - # Publish any `docs-v` branches -- check below to not run on other created branches - permissions: contents: write @@ -39,10 +36,8 @@ jobs: # Strip git ref prefix from version echo "${{ github.ref }}" VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,') - # If this is for a branch other than main or one starting with "docs-v" then exit happily - [[ "$VERSION" != "main" && "$VERSION" != "docs-v"* ]] && echo Not a docs branch...exiting && exit 0 # Strip "docs-v" prefix from branch name - [[ "$VERSION" == "docs-v"* ]] && ALIAS=$(echo $VERSION | sed -e 's/^docs-v//') + [[ "$VERSION" == "docs-v"* ]] && ALIAS=$(echo $VERSION | sed -e 's/^docs-v//') && VERSION=${ALIAS} # Copy all of the root level md files into the docs folder for deployment, tweaking the relative paths for i in *.md; do sed -e "s#docs/#./#g" $i >docs/$i; done # Populate overrides for the current version, and then remove to not apply if VERSION is main branch From 27a6741b5cb6886edc3e5d06722116c6dbe06ea9 Mon Sep 17 00:00:00 2001 From: jamshale Date: Thu, 22 Feb 2024 19:17:36 +0000 Subject: [PATCH 44/69] Fix - missing revocation notification Signed-off-by: jamshale --- aries_cloudagent/revocation/manager.py | 26 ++++++++++--------- .../revocation/tests/test_manager.py | 15 ++++++----- 2 files changed, 23 insertions(+), 18 deletions(-) diff --git a/aries_cloudagent/revocation/manager.py b/aries_cloudagent/revocation/manager.py index 824efe79c0..e6184cac2b 100644 --- a/aries_cloudagent/revocation/manager.py +++ b/aries_cloudagent/revocation/manager.py @@ -4,24 +4,24 @@ import logging from typing import Mapping, Optional, Sequence, Text, Tuple -from ..protocols.revocation_notification.v1_0.models.rev_notification_record import ( - RevNotificationRecord, -) from ..connections.models.conn_record import ConnRecord from ..core.error import BaseError from ..core.profile import Profile from ..indy.issuer import IndyIssuer -from ..storage.error import StorageNotFoundError -from .indy import IndyRevocation -from .models.issuer_cred_rev_record import IssuerCredRevRecord -from .models.issuer_rev_reg_record import IssuerRevRegRecord -from .util import notify_pending_cleared_event, notify_revocation_published_event from ..protocols.issue_credential.v1_0.models.credential_exchange import ( V10CredentialExchange, ) from 
..protocols.issue_credential.v2_0.models.cred_ex_record import ( V20CredExRecord, ) +from ..protocols.revocation_notification.v1_0.models.rev_notification_record import ( + RevNotificationRecord, +) +from ..storage.error import StorageNotFoundError +from .indy import IndyRevocation +from .models.issuer_cred_rev_record import IssuerCredRevRecord +from .models.issuer_rev_reg_record import IssuerRevRegRecord +from .util import notify_pending_cleared_event, notify_revocation_published_event class RevocationManagerError(BaseError): @@ -180,6 +180,9 @@ async def revoke_credential( write_ledger=write_ledger, endorser_did=endorser_did, ) + await notify_revocation_published_event( + self._profile, rev_reg_id, [cred_rev_id] + ) return rev_entry_resp else: async with self._profile.transaction() as txn: @@ -296,10 +299,9 @@ async def publish_pending_revocations( rev_entry_resp = await issuer_rr_upd.send_entry(self._profile) published = sorted(crid for crid in crids if crid not in failed_crids) result[issuer_rr_rec.revoc_reg_id] = published - if not connection_id: - await notify_revocation_published_event( - self._profile, issuer_rr_rec.revoc_reg_id, crids - ) + await notify_revocation_published_event( + self._profile, issuer_rr_rec.revoc_reg_id, published + ) return rev_entry_resp, result diff --git a/aries_cloudagent/revocation/tests/test_manager.py b/aries_cloudagent/revocation/tests/test_manager.py index 4d0689664f..9c0e9e7783 100644 --- a/aries_cloudagent/revocation/tests/test_manager.py +++ b/aries_cloudagent/revocation/tests/test_manager.py @@ -1,11 +1,10 @@ import json - -from aries_cloudagent.tests import mock from unittest import IsolatedAsyncioTestCase from aries_cloudagent.revocation.models.issuer_cred_rev_record import ( IssuerCredRevRecord, ) +from aries_cloudagent.tests import mock from ...connections.models.conn_record import ConnRecord from ...core.in_memory import InMemoryProfile @@ -14,11 +13,8 @@ V10CredentialExchange, ) from ...protocols.issue_credential.v2_0.models.cred_ex_record import V20CredExRecord - - -from ..manager import RevocationManager, RevocationManagerError - from .. 
import manager as test_module +from ..manager import RevocationManager, RevocationManagerError TEST_DID = "LjgpST2rjsoxYegQDRm7EL" SCHEMA_NAME = "bc-reg" @@ -87,6 +83,7 @@ async def test_revoke_credential_publish(self): revoc.return_value.get_ledger_registry = mock.CoroutineMock( return_value=mock_rev_reg ) + test_module.notify_revocation_published_event = mock.CoroutineMock() await self.manager.revoke_credential_by_cred_ex_id(CRED_EX_ID, publish=True) @@ -96,6 +93,7 @@ async def test_revoke_credential_publish(self): mock_issuer_rev_reg_record.tails_local_path, ["2", "1"], ) + assert test_module.notify_revocation_published_event.called async def test_revoke_credential_publish_endorser(self): conn_record = ConnRecord( @@ -170,6 +168,7 @@ async def test_revoke_credential_publish_endorser(self): revoc.return_value.get_ledger_registry = mock.CoroutineMock( return_value=mock_rev_reg ) + test_module.notify_revocation_published_event = mock.CoroutineMock() await self.manager.revoke_credential_by_cred_ex_id( cred_ex_id=CRED_EX_ID, @@ -178,6 +177,8 @@ async def test_revoke_credential_publish_endorser(self): write_ledger=False, ) + assert test_module.notify_revocation_published_event.called + issuer.revoke_credentials.assert_awaited_once_with( mock_issuer_rev_reg_record.cred_def_id, mock_issuer_rev_reg_record.revoc_reg_id, @@ -384,6 +385,7 @@ async def test_publish_pending_revocations_endorser(self): side_effect=[(json.dumps(delta), []) for delta in deltas] ) self.profile.context.injector.bind_instance(IndyIssuer, issuer) + test_module.notify_revocation_published_event = mock.CoroutineMock() manager = RevocationManager(self.profile) _, result = await manager.publish_pending_revocations( rrid2crid={REV_REG_ID: "2"}, connection_id=conn_id @@ -391,6 +393,7 @@ async def test_publish_pending_revocations_endorser(self): assert result == {REV_REG_ID: ["2"]} mock_issuer_rev_reg_records[0].clear_pending.assert_called_once() mock_issuer_rev_reg_records[1].clear_pending.assert_not_called() + assert test_module.notify_revocation_published_event.called async def test_publish_pending_revocations_endorser_x(self): deltas = [ From 3739c3a2541182b33c6f86485598b90d468cd4bd Mon Sep 17 00:00:00 2001 From: jamshale Date: Fri, 23 Feb 2024 21:14:02 +0000 Subject: [PATCH 45/69] Notify revocation when written to ledger Signed-off-by: jamshale --- .../anoncreds/default/legacy_indy/registry.py | 7 +++++++ aries_cloudagent/anoncreds/events.py | 8 +++++--- aries_cloudagent/anoncreds/revocation.py | 11 ++++++++--- aries_cloudagent/anoncreds/revocation_setup.py | 5 ++++- .../protocols/endorse_transaction/v1_0/manager.py | 7 +++++-- aries_cloudagent/revocation/manager.py | 6 ------ aries_cloudagent/revocation/tests/test_manager.py | 7 ------- aries_cloudagent/revocation/util.py | 4 ++-- aries_cloudagent/revocation_anoncreds/manager.py | 7 ------- 9 files changed, 31 insertions(+), 31 deletions(-) diff --git a/aries_cloudagent/anoncreds/default/legacy_indy/registry.py b/aries_cloudagent/anoncreds/default/legacy_indy/registry.py index c302f2348d..f092d61f43 100644 --- a/aries_cloudagent/anoncreds/default/legacy_indy/registry.py +++ b/aries_cloudagent/anoncreds/default/legacy_indy/registry.py @@ -47,6 +47,7 @@ BaseAnonCredsRegistrar, BaseAnonCredsResolver, ) +from ...events import RevListFinishedEvent from ...issuer import AnonCredsIssuer, AnonCredsIssuerError from ...models.anoncreds_cred_def import ( CredDef, @@ -966,6 +967,11 @@ async def update_revocation_list( ) if write_ledger: + await self.notify( + 
RevListFinishedEvent.with_payload( + curr_list.rev_reg_def_id, newly_revoked_indices + ) + ) return RevListResult( job_id=None, revocation_list_state=RevListState( @@ -983,6 +989,7 @@ async def update_revocation_list( "context": { "job_id": job_id, "rev_reg_def_id": rev_reg_def_id, + "rev_list": curr_list.serialize(), "options": { "endorser_connection_id": endorser_connection_id, "create_transaction_for_endorser": create_transaction, diff --git a/aries_cloudagent/anoncreds/events.py b/aries_cloudagent/anoncreds/events.py index 992feb21f6..7be3bc8603 100644 --- a/aries_cloudagent/anoncreds/events.py +++ b/aries_cloudagent/anoncreds/events.py @@ -109,7 +109,8 @@ def payload(self) -> RevRegDefFinishedPayload: class RevListFinishedPayload(NamedTuple): """Payload of rev list finished event.""" - rev_reg_def_id: str + rev_reg_id: str + revoked: list options: dict @@ -131,11 +132,12 @@ def __init__(self, payload: RevListFinishedPayload): @classmethod def with_payload( cls, - rev_reg_def_id: str, + rev_reg_id: str, + revoked: list, options: Optional[dict] = None, ): """With payload.""" - payload = RevListFinishedPayload(rev_reg_def_id, options) + payload = RevListFinishedPayload(rev_reg_id, revoked, options) return cls(payload) @property diff --git a/aries_cloudagent/anoncreds/revocation.py b/aries_cloudagent/anoncreds/revocation.py index 70cea45e64..0854c4a0b4 100644 --- a/aries_cloudagent/anoncreds/revocation.py +++ b/aries_cloudagent/anoncreds/revocation.py @@ -494,7 +494,7 @@ async def store_revocation_registry_list(self, result: RevListResult): if result.revocation_list_state.state == STATE_FINISHED: await self.notify( - RevListFinishedEvent.with_payload(rev_list.rev_reg_def_id) + RevListFinishedEvent.with_payload(rev_list.rev_reg_def_id, rev_list) ) except AskarError as err: @@ -502,7 +502,9 @@ async def store_revocation_registry_list(self, result: RevListResult): "Error saving new revocation registry" ) from err - async def finish_revocation_list(self, job_id: str, rev_reg_def_id: str): + async def finish_revocation_list( + self, job_id: str, rev_reg_def_id: str, revoked: list + ): """Mark a revocation list as finished.""" async with self.profile.transaction() as txn: # Finish the registration if the list is new, otherwise already updated @@ -519,7 +521,10 @@ async def finish_revocation_list(self, job_id: str, rev_reg_def_id: str): state=STATE_FINISHED, ) await txn.commit() - await self.notify(RevListFinishedEvent.with_payload(rev_reg_def_id)) + # Notify about revoked creds on any list update + await self.notify( + RevListFinishedEvent.with_payload(rev_reg_def_id, revoked) + ) async def update_revocation_list( self, diff --git a/aries_cloudagent/anoncreds/revocation_setup.py b/aries_cloudagent/anoncreds/revocation_setup.py index 8f16b382c9..c12b9060a8 100644 --- a/aries_cloudagent/anoncreds/revocation_setup.py +++ b/aries_cloudagent/anoncreds/revocation_setup.py @@ -8,6 +8,7 @@ from ..anoncreds.revocation import AnonCredsRevocation from ..core.event_bus import EventBus from ..core.profile import Profile +from ..revocation.util import notify_revocation_published_event from .events import ( CRED_DEF_FINISHED_PATTERN, REV_LIST_FINISHED_PATTERN, @@ -105,4 +106,6 @@ async def on_rev_reg_def(self, profile: Profile, event: RevRegDefFinishedEvent): async def on_rev_list(self, profile: Profile, event: RevListFinishedEvent): """Handle rev list finished.""" - LOGGER.debug("Revocation list finished: %s", event.payload.rev_reg_def_id) + await notify_revocation_published_event( + profile, 
event.payload.rev_reg_id, event.payload.revoked + ) diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/manager.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/manager.py index 03472f5f84..b59b4352cb 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/manager.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/manager.py @@ -863,14 +863,17 @@ async def endorsed_txn_post_processing( elif ledger_response["result"]["txn"]["type"] == "114": # revocation entry transaction rev_reg_id = ledger_response["result"]["txn"]["data"]["revocRegDefId"] + revoked = ledger_response["result"]["txn"]["data"]["value"].get( + "revoked", [] + ) meta_data["context"]["rev_reg_id"] = rev_reg_id if is_anoncreds: await AnonCredsRevocation(self._profile).finish_revocation_list( - meta_data["context"]["job_id"], rev_reg_id + meta_data["context"]["job_id"], rev_reg_id, revoked ) else: await notify_revocation_entry_endorsed_event( - self._profile, rev_reg_id, meta_data + self._profile, rev_reg_id, meta_data, revoked ) elif ledger_response["result"]["txn"]["type"] == "1": diff --git a/aries_cloudagent/revocation/manager.py b/aries_cloudagent/revocation/manager.py index e6184cac2b..e74c0b525a 100644 --- a/aries_cloudagent/revocation/manager.py +++ b/aries_cloudagent/revocation/manager.py @@ -180,9 +180,6 @@ async def revoke_credential( write_ledger=write_ledger, endorser_did=endorser_did, ) - await notify_revocation_published_event( - self._profile, rev_reg_id, [cred_rev_id] - ) return rev_entry_resp else: async with self._profile.transaction() as txn: @@ -299,9 +296,6 @@ async def publish_pending_revocations( rev_entry_resp = await issuer_rr_upd.send_entry(self._profile) published = sorted(crid for crid in crids if crid not in failed_crids) result[issuer_rr_rec.revoc_reg_id] = published - await notify_revocation_published_event( - self._profile, issuer_rr_rec.revoc_reg_id, published - ) return rev_entry_resp, result diff --git a/aries_cloudagent/revocation/tests/test_manager.py b/aries_cloudagent/revocation/tests/test_manager.py index 9c0e9e7783..6ebc48a330 100644 --- a/aries_cloudagent/revocation/tests/test_manager.py +++ b/aries_cloudagent/revocation/tests/test_manager.py @@ -83,7 +83,6 @@ async def test_revoke_credential_publish(self): revoc.return_value.get_ledger_registry = mock.CoroutineMock( return_value=mock_rev_reg ) - test_module.notify_revocation_published_event = mock.CoroutineMock() await self.manager.revoke_credential_by_cred_ex_id(CRED_EX_ID, publish=True) @@ -93,7 +92,6 @@ async def test_revoke_credential_publish(self): mock_issuer_rev_reg_record.tails_local_path, ["2", "1"], ) - assert test_module.notify_revocation_published_event.called async def test_revoke_credential_publish_endorser(self): conn_record = ConnRecord( @@ -168,7 +166,6 @@ async def test_revoke_credential_publish_endorser(self): revoc.return_value.get_ledger_registry = mock.CoroutineMock( return_value=mock_rev_reg ) - test_module.notify_revocation_published_event = mock.CoroutineMock() await self.manager.revoke_credential_by_cred_ex_id( cred_ex_id=CRED_EX_ID, @@ -177,8 +174,6 @@ async def test_revoke_credential_publish_endorser(self): write_ledger=False, ) - assert test_module.notify_revocation_published_event.called - issuer.revoke_credentials.assert_awaited_once_with( mock_issuer_rev_reg_record.cred_def_id, mock_issuer_rev_reg_record.revoc_reg_id, @@ -385,7 +380,6 @@ async def test_publish_pending_revocations_endorser(self): side_effect=[(json.dumps(delta), []) for delta in deltas] ) 
self.profile.context.injector.bind_instance(IndyIssuer, issuer) - test_module.notify_revocation_published_event = mock.CoroutineMock() manager = RevocationManager(self.profile) _, result = await manager.publish_pending_revocations( rrid2crid={REV_REG_ID: "2"}, connection_id=conn_id @@ -393,7 +387,6 @@ async def test_publish_pending_revocations_endorser(self): assert result == {REV_REG_ID: ["2"]} mock_issuer_rev_reg_records[0].clear_pending.assert_called_once() mock_issuer_rev_reg_records[1].clear_pending.assert_not_called() - assert test_module.notify_revocation_published_event.called async def test_publish_pending_revocations_endorser_x(self): deltas = [ diff --git a/aries_cloudagent/revocation/util.py b/aries_cloudagent/revocation/util.py index df40a17630..cc8c55209e 100644 --- a/aries_cloudagent/revocation/util.py +++ b/aries_cloudagent/revocation/util.py @@ -5,7 +5,6 @@ from ..core.profile import Profile - REVOCATION_EVENT_PREFIX = "acapy::REVOCATION::" EVENT_LISTENER_PATTERN = re.compile(f"^{REVOCATION_EVENT_PREFIX}(.*)?$") REVOCATION_REG_INIT_EVENT = "REGISTRY_INIT" @@ -52,11 +51,12 @@ async def notify_revocation_reg_endorsed_event( async def notify_revocation_entry_endorsed_event( - profile: Profile, rev_reg_id: str, meta_data: dict + profile: Profile, rev_reg_id: str, meta_data: dict, revoked: list ): """Send notification for a revocation registry entry endorsement event.""" topic = f"{REVOCATION_EVENT_PREFIX}{REVOCATION_ENTRY_ENDORSED_EVENT}::{rev_reg_id}" await profile.notify(topic, meta_data) + await notify_revocation_published_event(profile, rev_reg_id, revoked) async def notify_revocation_published_event( diff --git a/aries_cloudagent/revocation_anoncreds/manager.py b/aries_cloudagent/revocation_anoncreds/manager.py index a846396146..52e914ed23 100644 --- a/aries_cloudagent/revocation_anoncreds/manager.py +++ b/aries_cloudagent/revocation_anoncreds/manager.py @@ -16,7 +16,6 @@ ) from ..revocation.util import ( notify_pending_cleared_event, - notify_revocation_published_event, ) from ..storage.error import StorageNotFoundError from .models.issuer_cred_rev_record import IssuerCredRevRecord @@ -129,9 +128,6 @@ async def revoke_credential( result.revoked, options=options, ) - await notify_revocation_published_event( - self._profile, rev_reg_id, [cred_rev_id] - ) else: await revoc.mark_pending_revocations(rev_reg_id, int(cred_rev_id)) @@ -237,9 +233,6 @@ async def publish_pending_revocations( rrid, result.prev, result.curr, result.revoked, options ) published_crids[rrid] = sorted(result.revoked) - await notify_revocation_published_event( - self._profile, rrid, [str(crid) for crid in result.revoked] - ) return published_crids From 7644f2db124c384a65b747999165743a69d95a10 Mon Sep 17 00:00:00 2001 From: jamshale Date: Fri, 23 Feb 2024 21:25:43 +0000 Subject: [PATCH 46/69] Update unit tests Signed-off-by: jamshale --- aries_cloudagent/anoncreds/tests/test_revocation.py | 4 ++-- .../protocols/endorse_transaction/v1_0/tests/test_manager.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/aries_cloudagent/anoncreds/tests/test_revocation.py b/aries_cloudagent/anoncreds/tests/test_revocation.py index 4e82db18c1..fa117e8b15 100644 --- a/aries_cloudagent/anoncreds/tests/test_revocation.py +++ b/aries_cloudagent/anoncreds/tests/test_revocation.py @@ -598,13 +598,13 @@ async def test_finish_revocation_list(self, mock_finish, mock_handle): await self.revocation.finish_revocation_list( job_id="test-job-id", rev_reg_def_id="test-rev-reg-def-id", + revoked=[] ) assert 
mock_finish.called # Fetch finds list then there's nothing to do, it's already finished and updated await self.revocation.finish_revocation_list( - job_id="test-job-id", - rev_reg_def_id="test-rev-reg-def-id", + job_id="test-job-id", rev_reg_def_id="test-rev-reg-def-id", revoked=[] ) assert mock_finish.call_count == 1 diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_manager.py index b46e989f5d..7ed097a921 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_manager.py @@ -940,7 +940,7 @@ async def test_endorsed_txn_post_processing_anoncreds( "txn": { "type": "114", "metadata": {"from": TEST_DID}, - "data": {"revocRegDefId": REV_REG_ID}, + "data": {"revocRegDefId": REV_REG_ID, "value": {"revoked": [1]}}, }, "txnMetadata": {"txnId": REV_REG_ID}, }, From 03e3622963fc8d32c412cf0c51afa5be78bff4e1 Mon Sep 17 00:00:00 2001 From: jamshale Date: Fri, 23 Feb 2024 21:28:47 +0000 Subject: [PATCH 47/69] Fix formatting Signed-off-by: jamshale --- aries_cloudagent/anoncreds/tests/test_revocation.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/aries_cloudagent/anoncreds/tests/test_revocation.py b/aries_cloudagent/anoncreds/tests/test_revocation.py index fa117e8b15..26a75d57c2 100644 --- a/aries_cloudagent/anoncreds/tests/test_revocation.py +++ b/aries_cloudagent/anoncreds/tests/test_revocation.py @@ -596,9 +596,7 @@ async def test_finish_revocation_list(self, mock_finish, mock_handle): # Fetch doesn't find list then it should be created await self.revocation.finish_revocation_list( - job_id="test-job-id", - rev_reg_def_id="test-rev-reg-def-id", - revoked=[] + job_id="test-job-id", rev_reg_def_id="test-rev-reg-def-id", revoked=[] ) assert mock_finish.called From 4992bbd05dc88a02fcb3ceacca212b943a18a822 Mon Sep 17 00:00:00 2001 From: jamshale Date: Fri, 23 Feb 2024 22:47:34 +0000 Subject: [PATCH 48/69] Fix anoncreds non-endorsement revocation Signed-off-by: jamshale --- aries_cloudagent/anoncreds/default/legacy_indy/registry.py | 7 +++++-- demo/features/revocation-api.feature | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/aries_cloudagent/anoncreds/default/legacy_indy/registry.py b/aries_cloudagent/anoncreds/default/legacy_indy/registry.py index f092d61f43..fd9df94a0b 100644 --- a/aries_cloudagent/anoncreds/default/legacy_indy/registry.py +++ b/aries_cloudagent/anoncreds/default/legacy_indy/registry.py @@ -12,6 +12,7 @@ from ....anoncreds.default.legacy_indy.author import get_endorser_info from ....cache.base import BaseCache from ....config.injection_context import InjectionContext +from ....core.event_bus import EventBus from ....core.profile import Profile from ....ledger.base import BaseLedger from ....ledger.error import ( @@ -967,10 +968,12 @@ async def update_revocation_list( ) if write_ledger: - await self.notify( + event_bus = profile.inject(EventBus) + await event_bus.notify( + profile, RevListFinishedEvent.with_payload( curr_list.rev_reg_def_id, newly_revoked_indices - ) + ), ) return RevListResult( job_id=None, diff --git a/demo/features/revocation-api.feature b/demo/features/revocation-api.feature index 9fcaa2d099..0049c42da3 100644 --- a/demo/features/revocation-api.feature +++ b/demo/features/revocation-api.feature @@ -39,7 +39,7 @@ Feature: ACA-Py Revocation API Then "Bob" can verify the credential from "" was revoked Examples: | issuer | 
Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | Proof_request | - #| Acme | --revocation --public-did | | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | + | Acme | --revocation --public-did | | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | | Acme | --revocation --public-did --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | driverslicense_v2 | Data_DL_MaxValues | DL_age_over_19_v2 | @Revoc-api.x @GHA-Anoncreds-break From 5cf94f5b23be8255dc4b95c1ae19a6b80550241c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 21:52:12 +0000 Subject: [PATCH 49/69] chore(deps): Bump the all-actions group with 3 updates Bumps the all-actions group with 3 updates: [actions/checkout](https://github.com/actions/checkout), [actions/setup-python](https://github.com/actions/setup-python) and [actions/cache](https://github.com/actions/cache). Updates `actions/checkout` from 3 to 4 - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3...v4) Updates `actions/setup-python` from 4 to 5 - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) Updates `actions/cache` from 2 to 4 - [Release notes](https://github.com/actions/cache/releases) - [Changelog](https://github.com/actions/cache/blob/main/RELEASES.md) - [Commits](https://github.com/actions/cache/compare/v2...v4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions - dependency-name: actions/cache dependency-type: direct:production update-type: version-update:semver-major dependency-group: all-actions ... 
Signed-off-by: dependabot[bot] --- .github/workflows/publish-docs.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index e22d7763ad..6f0eeeb2a2 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -14,13 +14,13 @@ jobs: deploy: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 # fetch all commits/branches - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: 3.x - - uses: actions/cache@v2 + - uses: actions/cache@v4 with: key: ${{ github.ref }} path: .cache From f9b282ebf59187ba6ab80d0b4db879c33c1507ad Mon Sep 17 00:00:00 2001 From: jamshale <31809382+jamshale@users.noreply.github.com> Date: Thu, 29 Feb 2024 08:44:20 -0800 Subject: [PATCH 50/69] Allow for crids in event payload to be integers (#2819) Signed-off-by: jamshale --- .../revocation_notification/v1_0/routes.py | 15 ++++++---- .../v1_0/tests/test_routes.py | 29 +++++++++++++++++-- .../revocation_notification/v2_0/routes.py | 11 +++++-- .../v2_0/tests/test_routes.py | 27 +++++++++++++++-- 4 files changed, 71 insertions(+), 11 deletions(-) diff --git a/aries_cloudagent/protocols/revocation_notification/v1_0/routes.py b/aries_cloudagent/protocols/revocation_notification/v1_0/routes.py index 4b577315a2..a71c95125d 100644 --- a/aries_cloudagent/protocols/revocation_notification/v1_0/routes.py +++ b/aries_cloudagent/protocols/revocation_notification/v1_0/routes.py @@ -8,8 +8,8 @@ from ....messaging.responder import BaseResponder from ....revocation.util import ( REVOCATION_CLEAR_PENDING_EVENT, - REVOCATION_PUBLISHED_EVENT, REVOCATION_EVENT_PREFIX, + REVOCATION_PUBLISHED_EVENT, ) from ....storage.error import StorageError, StorageNotFoundError from .models.rev_notification_record import RevNotificationRecord @@ -31,10 +31,14 @@ def register_events(event_bus: EventBus): async def on_revocation_published(profile: Profile, event: Event): """Handle issuer revoke event.""" - LOGGER.debug("Sending notification of revocation to recipient: %s", event.payload) + LOGGER.debug("Received notification of revocation publication: %s", event.payload) + should_notify = profile.settings.get("revocation.notify", False) responder = profile.inject(BaseResponder) crids = event.payload.get("crids") or [] + # Allow for crids to be integers + if crids and isinstance(crids[0], int): + crids = [str(crid) for crid in crids] try: async with profile.session() as session: @@ -46,9 +50,10 @@ async def on_revocation_published(profile: Profile, event: Event): for record in records: await record.delete_record(session) - await responder.send( - record.to_message(), connection_id=record.connection_id - ) + if should_notify: + await responder.send( + record.to_message(), connection_id=record.connection_id + ) except StorageNotFoundError: LOGGER.info( diff --git a/aries_cloudagent/protocols/revocation_notification/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/revocation_notification/v1_0/tests/test_routes.py index 0cd23a8b04..4a4e0145dd 100644 --- a/aries_cloudagent/protocols/revocation_notification/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/revocation_notification/v1_0/tests/test_routes.py @@ -1,9 +1,9 @@ """Test routes.py""" -from aries_cloudagent.tests import mock import pytest -from .. 
import routes as test_module +from aries_cloudagent.tests import mock + from .....config.settings import Settings from .....core.event_bus import Event, MockEventBus from .....core.in_memory import InMemoryProfile @@ -15,6 +15,7 @@ REVOCATION_PUBLISHED_EVENT, ) from .....storage.error import StorageError, StorageNotFoundError +from .. import routes as test_module @pytest.fixture @@ -52,6 +53,8 @@ async def test_on_revocation_published(profile: Profile, responder: MockResponde assert isinstance(profile.settings, Settings) + profile.settings.set_value("revocation.notify", True) + with mock.patch.object(test_module, "RevNotificationRecord", MockRec): await test_module.on_revocation_published(profile, event) @@ -59,6 +62,28 @@ async def test_on_revocation_published(profile: Profile, responder: MockResponde mock_rec.delete_record.assert_called_once() assert responder.messages + # Test with integer crids + mock_rec.cred_rev_id = "1" + MockRec.query_by_rev_reg_id = mock.CoroutineMock(return_value=[mock_rec]) + event = Event(topic, {"rev_reg_id": "mock", "crids": [1]}) + + with mock.patch.object(test_module, "RevNotificationRecord", MockRec): + await test_module.on_revocation_published(profile, event) + + MockRec.query_by_rev_reg_id.assert_called_once() + assert mock_rec.delete_record.call_count == 2 + + # Test with empty crids + mock_rec.cred_rev_id = "1" + MockRec.query_by_rev_reg_id = mock.CoroutineMock(return_value=[mock_rec]) + event = Event(topic, {"rev_reg_id": "mock", "crids": []}) + + with mock.patch.object(test_module, "RevNotificationRecord", MockRec): + await test_module.on_revocation_published(profile, event) + + MockRec.query_by_rev_reg_id.assert_called_once() + assert mock_rec.delete_record.call_count == 2 + @pytest.mark.asyncio async def test_on_revocation_published_x_not_found( diff --git a/aries_cloudagent/protocols/revocation_notification/v2_0/routes.py b/aries_cloudagent/protocols/revocation_notification/v2_0/routes.py index 9c62a16c15..3ab25fec76 100644 --- a/aries_cloudagent/protocols/revocation_notification/v2_0/routes.py +++ b/aries_cloudagent/protocols/revocation_notification/v2_0/routes.py @@ -8,8 +8,8 @@ from ....messaging.responder import BaseResponder from ....revocation.util import ( REVOCATION_CLEAR_PENDING_EVENT, - REVOCATION_PUBLISHED_EVENT, REVOCATION_EVENT_PREFIX, + REVOCATION_PUBLISHED_EVENT, ) from ....storage.error import StorageError, StorageNotFoundError from .models.rev_notification_record import RevNotificationRecord @@ -31,11 +31,14 @@ def register_events(event_bus: EventBus): async def on_revocation_published(profile: Profile, event: Event): """Handle issuer revoke event.""" - LOGGER.debug("Sending notification of revocation to recipient: %s", event.payload) + LOGGER.debug("Received notification of revocation publication: %s", event.payload) should_notify = profile.settings.get("revocation.notify", False) responder = profile.inject(BaseResponder) crids = event.payload.get("crids") or [] + # Allow for crids to be integers + if crids and isinstance(crids[0], int): + crids = [str(crid) for crid in crids] try: async with profile.session() as session: @@ -51,6 +54,10 @@ async def on_revocation_published(profile: Profile, event: Event): await responder.send( record.to_message(), connection_id=record.connection_id ) + LOGGER.info( + "Sent revocation notification for credential to %s", + record.connection_id, + ) except StorageNotFoundError: LOGGER.info( diff --git a/aries_cloudagent/protocols/revocation_notification/v2_0/tests/test_routes.py 
b/aries_cloudagent/protocols/revocation_notification/v2_0/tests/test_routes.py index 526ed00e72..80f6e34b11 100644 --- a/aries_cloudagent/protocols/revocation_notification/v2_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/revocation_notification/v2_0/tests/test_routes.py @@ -1,9 +1,9 @@ """Test routes.py""" -from aries_cloudagent.tests import mock import pytest -from .. import routes as test_module +from aries_cloudagent.tests import mock + from .....config.settings import Settings from .....core.event_bus import Event, MockEventBus from .....core.in_memory import InMemoryProfile @@ -15,6 +15,7 @@ REVOCATION_PUBLISHED_EVENT, ) from .....storage.error import StorageError, StorageNotFoundError +from .. import routes as test_module @pytest.fixture @@ -60,6 +61,28 @@ async def test_on_revocation_published(profile: Profile, responder: MockResponde mock_rec.delete_record.assert_called_once() assert responder.messages + # Test with integer crids + mock_rec.cred_rev_id = "1" + MockRec.query_by_rev_reg_id = mock.CoroutineMock(return_value=[mock_rec]) + event = Event(topic, {"rev_reg_id": "mock", "crids": [1]}) + + with mock.patch.object(test_module, "RevNotificationRecord", MockRec): + await test_module.on_revocation_published(profile, event) + + MockRec.query_by_rev_reg_id.assert_called_once() + assert mock_rec.delete_record.call_count == 2 + + # Test with empty crids + mock_rec.cred_rev_id = "1" + MockRec.query_by_rev_reg_id = mock.CoroutineMock(return_value=[mock_rec]) + event = Event(topic, {"rev_reg_id": "mock", "crids": []}) + + with mock.patch.object(test_module, "RevNotificationRecord", MockRec): + await test_module.on_revocation_published(profile, event) + + MockRec.query_by_rev_reg_id.assert_called_once() + assert mock_rec.delete_record.call_count == 2 + @pytest.mark.asyncio async def test_on_revocation_published_no_notify( From 941b07f6431bcdbee07cf7cfd688e8dc04e319e1 Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Thu, 29 Feb 2024 14:13:36 -0800 Subject: [PATCH 51/69] GHA update for doc publishing, fix doc file that was blanked Signed-off-by: Stephen Curran --- .github/workflows/publish-docs.yml | 5 +- docs/demo/AliceWantsAJsonCredential.md | 584 +++++++++++++++++++++++++ 2 files changed, 586 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index e22d7763ad..ac4a06f5fc 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -35,9 +35,8 @@ jobs: run: | # Strip git ref prefix from version echo "${{ github.ref }}" - VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,') - # Strip "docs-v" prefix from branch name - [[ "$VERSION" == "docs-v"* ]] && ALIAS=$(echo $VERSION | sed -e 's/^docs-v//') && VERSION=${ALIAS} + # Extract the version and if it is in `docs-v` form, strip that off the version + VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,' -e 's/^docs-v//') # Copy all of the root level md files into the docs folder for deployment, tweaking the relative paths for i in *.md; do sed -e "s#docs/#./#g" $i >docs/$i; done # Populate overrides for the current version, and then remove to not apply if VERSION is main branch diff --git a/docs/demo/AliceWantsAJsonCredential.md b/docs/demo/AliceWantsAJsonCredential.md index e69de29bb2..6001c8b12e 100644 --- a/docs/demo/AliceWantsAJsonCredential.md +++ b/docs/demo/AliceWantsAJsonCredential.md @@ -0,0 +1,584 @@ +# How to Issue JSON-LD Credentials using ACA-Py + +ACA-Py has the capability to issue and verify both Indy 
and JSON-LD (W3C compliant) credentials. + +The JSON-LD support is documented [here](../JsonLdCredentials.md) - this document will provide some additional detail in how to use the demo and admin api to issue and prove JSON-LD credentials. + +## Setup Agents to Issue JSON-LD Credentials + +Clone this repository to a directory on your local: + +```bash +git clone https://github.com/hyperledger/aries-cloudagent-python.git +cd aries-cloudagent-python/demo +``` + +Open up a second shell (so you have 2 shells open in the `demo` directory) and in one shell: + +```bash +LEDGER_URL=http://test.bcovrin.vonx.io ./run_demo faber --did-exchange --aip 20 --cred-type json-ld +``` + +... and in the other: + +```bash +LEDGER_URL=http://test.bcovrin.vonx.io ./run_demo alice +``` + +Note that you start the `faber` agent with AIP2.0 options. (When you specify `--cred-type json-ld` faber will set aip to `20` automatically, +so the `--aip` option is not strictly required). Note as well the use of the `LEDGER_URL`. Technically, that should not be needed if we aren't +doing anything with an Indy ledger-based credentials. However, there must be something in the way that the Faber and Alice controllers are starting up that requires access to a ledger. + +Also note that the above will only work with the `/issue-credential-2.0/create-offer` endpoint. If you want to use the `/issue-credential-2.0/send` endpoint - which automates each step of the credential exchange - you will need to include the `--no-auto` option when starting each of the alice and faber agents (since the alice and faber controllers *also* automatically respond to each step in the credential exchange). + +(Alternately you can run run Alice and Faber agents locally, see the `./faber-local.sh` and `./alice-local.sh` scripts in the `demo` directory.) + +Copy the "invitation" json text from the Faber shell and paste into the Alice shell to establish a connection between the two agents. + +(If you are running with `--no-auto` you will also need to call the `/connections/{conn_id}/accept-invitation` endpoint in alice's admin api swagger page.) + +Now open up two browser windows to the [Faber](http://localhost:8021/api/doc) and [Alice](http://localhost:8031/api/doc) admin api swagger pages. + +Using the Faber admin api, you have to create a DID with the appropriate: + +- DID method ("key" or "sov") +- key type "ed25519" or "bls12381g2" (corresponding to signature types "Ed25519Signature2018" or "BbsBlsSignature2020") +- if you use DID method "sov" you must use key type "ed25519" + +Note that "did:sov" must be a public DID (i.e. registered on the ledger) but "did:key" is not. + +For example, in Faber's swagger page call the `/wallet/did/create` endpoint with the following payload: + +```json +{ + "method": "key", + "options": { + "key_type": "bls12381g2" // or ed25519 + } +} +``` + +This will return something like: + +```json +{ + "result": { + "did": "did:key:zUC71KdwBhq1FioWh53VXmyFiGpewNcg8Ld42WrSChpMzzskRWwHZfG9TJ7hPj8wzmKNrek3rW4ZkXNiHAjVchSmTr9aNUQaArK3KSkTySzjEM73FuDV62bjdAHF7EMnZ27poCE", + "verkey": "mV6482Amu6wJH8NeMqH3QyTjh6JU6N58A8GcirMZG7Wx1uyerzrzerA2EjnhUTmjiSLAp6CkNdpkLJ1NTS73dtcra8WUDDBZ3o455EMrkPyAtzst16RdTMsGe3ctyTxxJav", + "posture": "wallet_only", + "key_type": "bls12381g2", + "method": "key" + } +} +``` + +You do *not* create a schema or cred def for a JSON-LD credential (these are only required for "indy" credentials). + +You will need to create a DID as above for Alice as well (`/wallet/did/create` etc ...). 
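If you prefer to script this setup rather than click through Swagger, the same calls can be made with any HTTP client. The sketch below is illustrative only: it assumes the demo's default admin ports (8021 for Faber, 8031 for Alice) and that the admin APIs run without an API key; the `create_did` helper is not part of the demo.

```python
# Minimal sketch: create a did:key for each agent via the admin API (not part of the demo).
import requests

def create_did(admin_url: str, key_type: str = "bls12381g2") -> dict:
    """Call POST /wallet/did/create and return the new DID record."""
    resp = requests.post(
        f"{admin_url}/wallet/did/create",
        json={"method": "key", "options": {"key_type": key_type}},
    )
    resp.raise_for_status()
    return resp.json()["result"]

faber_did = create_did("http://localhost:8021")   # used below as the credential "issuer"
alice_did = create_did("http://localhost:8031")   # used below as "credentialSubject.id"
print(faber_did["did"], alice_did["did"])
```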
+ +Congratulations, you are now ready to start issuing JSON-LD credentials! + +- You have two agents with a connection established between the agents - you will need to copy Faber's `connection_id` into the examples below. +- You have created a (non-public) DID for Faber to use to sign/issue the credentials - you will need to copy the DID that you created above into the examples below (as `issuer`). +- You have created a (non-public) DID for Alice to use as her `credentialSubject.id` - this is required for Alice to sign the proof (the `credentialSubject.id` is not required, but then the provided presentation can't be verified). + +To issue a credential, use the `/issue-credential-2.0/send-offer` endpoint. (You can also use the `/issue-credential-2.0/send`) endpoint, if, as mentioned above, you have included the `--no-auto` when starting both of the agents.) + +You can test with this example payload (just replace the "connection_id", "issuer" key, "credentialSubject.id" and "proofType" with appropriate values: + +```json +{ + "connection_id": "4fba2ce5-b411-4ecf-aa1b-ec66f3f6c903", + "filter": { + "ld_proof": { + "credential": { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1" + ], + "type": ["VerifiableCredential", "UniversityDegreeCredential"], + "issuer": "did:key:zUC71KdwBhq1FioWh53VXmyFiGpewNcg8Ld42WrSChpMzzskRWwHZfG9TJ7hPj8wzmKNrek3rW4ZkXNiHAjVchSmTr9aNUQaArK3KSkTySzjEM73FuDV62bjdAHF7EMnZ27poCE", + "issuanceDate": "2020-01-01T12:00:00Z", + "credentialSubject": { + "id": "did:key:aksdkajshdkajhsdkjahsdkjahsdj", + "givenName": "Sally", + "familyName": "Student", + "degree": { + "type": "BachelorDegree", + "degreeType": "Undergraduate", + "name": "Bachelor of Science and Arts" + }, + "college": "Faber College" + } + }, + "options": { + "proofType": "BbsBlsSignature2020" + } + } + } +} +``` + +Note that if you have the "auto" settings on, this is all you need to do. Otherwise you need to call the `/send-request`, `/store`, etc endpoints to complete the protocol. 
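When running with `--no-auto`, Alice's side of those remaining steps can also be scripted. The sketch below only illustrates the flow; the exact record paths, state names, and the `cred_ex_record` wrapper in the response are assumptions to verify against your agent's Swagger page.

```python
# Sketch of driving Alice's half of the exchange when --no-auto is set (paths assumed).
import requests

ALICE_ADMIN = "http://localhost:8031"

def advance_alice_credential_exchanges():
    results = requests.get(f"{ALICE_ADMIN}/issue-credential-2.0/records").json()["results"]
    for result in results:
        record = result.get("cred_ex_record", result)
        cred_ex_id, state = record["cred_ex_id"], record["state"]
        if state == "offer-received":
            # Accept Faber's offer by sending back a credential request
            requests.post(
                f"{ALICE_ADMIN}/issue-credential-2.0/records/{cred_ex_id}/send-request"
            )
        elif state == "credential-received":
            # The credential has arrived but is not yet saved to the wallet
            requests.post(
                f"{ALICE_ADMIN}/issue-credential-2.0/records/{cred_ex_id}/store", json={}
            )
```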
+ +To see the issued credential, call the `/credentials/w3c` endpoint on Alice's admin api - this will return something like: + +```json +{ + "results": [ + { + "contexts": [ + "https://w3id.org/security/bbs/v1", + "https://www.w3.org/2018/credentials/examples/v1", + "https://www.w3.org/2018/credentials/v1" + ], + "types": [ + "UniversityDegreeCredential", + "VerifiableCredential" + ], + "schema_ids": [], + "issuer_id": "did:key:zUC71KdwBhq1FioWh53VXmyFiGpewNcg8Ld42WrSChpMzzskRWwHZfG9TJ7hPj8wzmKNrek3rW4ZkXNiHAjVchSmTr9aNUQaArK3KSkTySzjEM73FuDV62bjdAHF7EMnZ27poCE", + "subject_ids": [], + "proof_types": [ + "BbsBlsSignature2020" + ], + "cred_value": { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + "https://w3id.org/security/bbs/v1" + ], + "type": [ + "VerifiableCredential", + "UniversityDegreeCredential" + ], + "issuer": "did:key:zUC71Kd...poCE", + "issuanceDate": "2020-01-01T12:00:00Z", + "credentialSubject": { + "id": "did:key:aksdkajshdkajhsdkjahsdkjahsdj", + "givenName": "Sally", + "familyName": "Student", + "degree": { + "type": "BachelorDegree", + "degreeType": "Undergraduate", + "name": "Bachelor of Science and Arts" + }, + "college": "Faber College" + }, + "proof": { + "type": "BbsBlsSignature2020", + "proofPurpose": "assertionMethod", + "verificationMethod": "did:key:zUC71Kd...poCE#zUC71Kd...poCE", + "created": "2021-05-19T16:19:44.458170", + "proofValue": "g0weLyw2Q+niQ4pGfiXB...tL9C9ORhy9Q==" + } + }, + "cred_tags": {}, + "record_id": "365ab87b12f74b2db784fdd4db8419f5" + } + ] +} +``` + +If you *don't* see the credential in your wallet, look up the credential exchange record (in alice's admin api - `/issue-credential-2.0/records`) and check the state. If the state is `credential-received`, then the credential has been received but not stored, in this case just call the `/store` endpoint for this credential exchange. + +## Building More Realistic JSON-LD Credentials + +The above example uses the `https://www.w3.org/2018/credentials/examples/v1` context, which should never be used in a real application. + +To build credentials in real life, you first determine which attributes you need and then include the appropriate contexts. + +### Context schema.org + +You can use attributes defined on [schema.org](https://schema.org). Although this is *NOT RECOMMENDED* (included here for illustrative purposes only) - individual attributes can't be validated (see the comment later on). + +You first include `https://schema.org` in the `@context` block of the credential as follows: + +```json +"@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://schema.org" +], +``` + +Then you review the [attributes and objects defined by `https://schema.org`](https://schema.org/docs/schemas.html) and decide what you need to include in your credential. 
+ +For example, to issue a credential with [givenName](https://schema.org/givenName), [familyName](https://schema.org/familyName) and [alumniOf](https://schema.org/alumniOf) attributes, submit the following: + +```json +{ + "connection_id": "ad35a4d8-c84b-4a4f-a83f-1afbf134b8b9", + "filter": { + "ld_proof": { + "credential": { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://schema.org" + ], + "type": ["VerifiableCredential", "Person"], + "issuer": "did:key:zUC71pj2gpDLfcZ9DE1bMtjZGWCSLhkQsUCaKjqXtCftGkz27894pEX9VvGNiFsaV67gqv2TEPQ2aDaDDdTDNp42LfDdK1LaWSBCfzsQEyaiR1zjZm1RtoRu1ZM6v6vz4TiqDgU", + "issuanceDate": "2020-01-01T12:00:00Z", + "credentialSubject": { + "id": "did:key:aksdkajshdkajhsdkjahsdkjahsdj", + "givenName": "Sally", + "familyName": "Student", + "alumniOf": "Example University" + } + }, + "options": { + "proofType": "BbsBlsSignature2020" + } + } + } +} +``` + +Note that with `https://schema.org`, if you include attributes that aren't defined by *any* context, you will *not* get an error. For example, you can try replacing the `credentialSubject` in the above with: + +```json +"credentialSubject": { + "id": "did:key:aksdkajshdkajhsdkjahsdkjahsdj", + "givenName": "Sally", + "familyName": "Student", + "alumniOf": "Example University", + "someUndefinedAttribute": "the value of the attribute" +} +``` + +... and you might expect the credential issuance to fail; however, `https://schema.org` defines a `@vocab` that all terms derive from by default ([see here](https://stackoverflow.com/questions/30945898/what-is-the-use-of-vocab-in-json-ld-and-what-is-the-difference-to-context/30948037#30948037)), so the undefined attribute is accepted. + +You can include more complex schemas, for example to use the schema.org [Person](https://schema.org/Person) schema (which includes `givenName` and `familyName`): + +```json +{ + "connection_id": "ad35a4d8-c84b-4a4f-a83f-1afbf134b8b9", + "filter": { + "ld_proof": { + "credential": { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://schema.org" + ], + "type": ["VerifiableCredential", "Person"], + "issuer": "did:key:zUC71pj2gpDLfcZ9DE1bMtjZGWCSLhkQsUCaKjqXtCftGkz27894pEX9VvGNiFsaV67gqv2TEPQ2aDaDDdTDNp42LfDdK1LaWSBCfzsQEyaiR1zjZm1RtoRu1ZM6v6vz4TiqDgU", + "issuanceDate": "2020-01-01T12:00:00Z", + "credentialSubject": { + "id": "did:key:aksdkajshdkajhsdkjahsdkjahsdj", + "student": { + "type": "Person", + "givenName": "Sally", + "familyName": "Student", + "alumniOf": "Example University" + } + } + }, + "options": { + "proofType": "BbsBlsSignature2020" + } + } + } +} +``` + +## Credential-Specific Contexts + +The recommended approach to defining credentials is to define a credential-specific vocabulary (or make use of existing ones). (Note that these can include references to `https://schema.org`, you just shouldn't use this directly in your credential.)
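Whichever context you settle on, it can help to check how your attribute names actually expand before issuing. A minimal sketch using the `pyld` JSON-LD processor (an extra dependency, not part of the demo; it needs network access to fetch the remote contexts):

```python
# Sketch: expand a draft credential to see which terms resolve (pyld assumed installed).
from pyld import jsonld

draft = {
    "@context": ["https://www.w3.org/2018/credentials/v1", "https://schema.org"],
    "type": ["VerifiableCredential", "Person"],
    "credentialSubject": {
        "givenName": "Sally",
        "someUndefinedAttribute": "the value of the attribute",
    },
}

print(jsonld.expand(draft))
# Because schema.org supplies an @vocab, "someUndefinedAttribute" still expands to a
# schema.org IRI. With a strict, credential-specific context an unknown term would be
# dropped from the expanded output, which makes mistakes much easier to spot in tests.
```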
+ +### Credential Issue Example + +The following example uses the W3C citizenship context to issue a PermanentResident credential (replace the `connection_id`, `issuer` and `credentialSubject.id` with your local values): + +```json +{ + "connection_id": "41acd909-9f45-4c69-8641-8146e0444a57", + "filter": { + "ld_proof": { + "credential": { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1" + ], + "type": [ + "VerifiableCredential", + "PermanentResident" + ], + "id": "https://credential.example.com/residents/1234567890", + "issuer": "did:key:zUC7Dus47jW5Avcne8LLsUvJSdwspmErgehxMWqZZy8eSSNoHZ4x8wgs77sAmQtCADED5RQP1WWhvt7KFNm6GGMxdSGpKu3PX6R9a61G9VoVsiFoRf1yoK6pzhq9jtFP3e2SmU9", + "issuanceDate": "2020-01-01T12:00:00Z", + "credentialSubject": { + "type": [ + "PermanentResident" + ], + "id": "did:key:zUC7CXi82AXbkv4SvhxDxoufrLwQSAo79qbKiw7omCQ3c4TyciDdb9s3GTCbMvsDruSLZX6HNsjGxAr2SMLCNCCBRN5scukiZ4JV9FDPg5gccdqE9nfCU2zUcdyqRiUVnn9ZH83", + "givenName": "ALICE", + "familyName": "SMITH", + "gender": "Female", + "birthCountry": "Bahamas", + "birthDate": "1958-07-17" + } + }, + "options": { + "proofType": "BbsBlsSignature2020" + } + } + } +} +``` + +Copy and paste this content into Faber's `/issue-credential-2.0/send-offer` endpoint, and it will kick off the exchange process to issue a W3C credential to Alice. + +In Alice's swagger page, submit the `/credentials/records/w3c` endpoint to see the issued credential. + +### Request Presentation Example + +To request a proof, submit the following (with appropriate `connection_id`) to Faber's `/present-proof-2.0/send-request` endpoint: + +```json +{ + "comment": "string", + "connection_id": "41acd909-9f45-4c69-8641-8146e0444a57", + "presentation_request": { + "dif": { + "options": { + "challenge": "3fa85f64-5717-4562-b3fc-2c963f66afa7", + "domain": "4jt78h47fh47" + }, + "presentation_definition": { + "id": "32f54163-7166-48f1-93d8-ff217bdb0654", + "format": { + "ldp_vp": { + "proof_type": [ + "BbsBlsSignature2020" + ] + } + }, + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "EU Driver's License", + "schema": [ + { + "uri": "https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri": "https://w3id.org/citizenship#PermanentResident" + } + ], + "constraints": { + "limit_disclosure": "required", + "is_holder": [ + { + "directive": "required", + "field_id": [ + "1f44d55f-f161-4938-a659-f8026467f126" + ] + } + ], + "fields": [ + { + "id": "1f44d55f-f161-4938-a659-f8026467f126", + "path": [ + "$.credentialSubject.familyName" + ], + "purpose": "The claim must be from one of the specified issuers", + "filter": { + "const": "SMITH" + } + }, + { + "path": [ + "$.credentialSubject.givenName" + ], + "purpose": "The claim must be from one of the specified issuers" + } + ] + } + } + ] + } + } + } +} +``` + +Note that the `is_holder` property can be used by Faber to verify that the holder of credential is the same as the subject of the attribute (`familyName`). Later on, the received presentation will be signed and verifiable only if `is_holder` with `"directive": "required"` is included in the presentation request. + +There are several ways that Alice can respond with a presentation. 
The simplest will just tell ACA-Py to put the presentation together and send it to Faber - submit the following to Alice's `/present-proof-2.0/records/{pres_ex_id}/send-presentation`: + +```json +{ + "dif": { + } +} +``` + +There are two ways that Alice can provide some constraints to tell ACA-Py which credential(s) to include in the presentation. + +Firstly, Alice can include the received presentation request in the body to the `/send-presentation` endpoint, and can include additional constraints on the fields: + +```json +{ + "dif": { + "issuer_id": "did:key:zUC7Dus47jW5Avcne8LLsUvJSdwspmErgehxMWqZZy8eSSNoHZ4x8wgs77sAmQtCADED5RQP1WWhvt7KFNm6GGMxdSGpKu3PX6R9a61G9VoVsiFoRf1yoK6pzhq9jtFP3e2SmU9", + "presentation_definition": { + "format": { + "ldp_vp": { + "proof_type": [ + "BbsBlsSignature2020" + ] + } + }, + "id": "32f54163-7166-48f1-93d8-ff217bdb0654", + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "Some kind of citizenship check", + "schema": [ + { + "uri": "https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri": "https://w3id.org/citizenship#PermanentResident" + } + ], + "constraints": { + "limit_disclosure": "required", + "is_holder": [ + { + "directive": "required", + "field_id": [ + "1f44d55f-f161-4938-a659-f8026467f126", + "332be361-823a-4863-b18b-c3b930c5623e" + ], + } + ], + "fields": [ + { + "id": "1f44d55f-f161-4938-a659-f8026467f126", + "path": [ + "$.credentialSubject.familyName" + ], + "purpose": "The claim must be from one of the specified issuers", + "filter": { + "const": "SMITH" + } + }, + { + "id": "332be361-823a-4863-b18b-c3b930c5623e", + "path": [ + "$.id" + ], + "purpose": "Specify the id of the credential to present", + "filter": { + "const": "https://credential.example.com/residents/1234567890" + } + } + ] + } + } + ] + } + } +} +``` + +Note the additional constraint on `"path": [ "$.id" ]` - this restricts the presented credential to the one with the matching `credential.id`. Any credential attributes can be used, however this presumes that the issued credentials contain a uniquely identifying attribute. 
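However Alice chooses to constrain the match, sending the presentation is a single admin API call. A small illustrative sketch (the admin port and the `request-received` state filter are assumptions based on this demo setup):

```python
# Sketch: answer the outstanding proof request from Alice's agent (paths/states assumed).
import requests

ALICE_ADMIN = "http://localhost:8031"

# Find the presentation exchange created by Faber's send-request
records = requests.get(
    f"{ALICE_ADMIN}/present-proof-2.0/records",
    params={"state": "request-received"},
).json()["results"]
pres_ex_id = records[0]["pres_ex_id"]

# An empty "dif" body lets ACA-Py select matching credentials; add issuer_id,
# a presentation_definition with extra field constraints, or record_ids (shown next)
# to control exactly which credential is presented.
requests.post(
    f"{ALICE_ADMIN}/present-proof-2.0/records/{pres_ex_id}/send-presentation",
    json={"dif": {}},
)
```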
+ +Another option is for Alice to specify the credential `record_id` - this is an internal value within ACA-Py: + +```json +{ + "dif": { + "issuer_id": "did:key:zUC7Dus47jW5Avcne8LLsUvJSdwspmErgehxMWqZZy8eSSNoHZ4x8wgs77sAmQtCADED5RQP1WWhvt7KFNm6GGMxdSGpKu3PX6R9a61G9VoVsiFoRf1yoK6pzhq9jtFP3e2SmU9", + "presentation_definition": { + "format": { + "ldp_vp": { + "proof_type": [ + "BbsBlsSignature2020" + ] + } + }, + "id": "32f54163-7166-48f1-93d8-ff217bdb0654", + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "Some kind of citizenship check", + "schema": [ + { + "uri": "https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri": "https://w3id.org/citizenship#PermanentResident" + } + ], + "constraints": { + "limit_disclosure": "required", + "fields": [ + { + "path": [ + "$.credentialSubject.familyName" + ], + "purpose": "The claim must be from one of the specified issuers", + "filter": { + "const": "SMITH" + } + } + ] + } + } + ] + }, + "record_ids": { + "citizenship_input_1": [ "1496316f972e40cf9b46b35971182337" ] + } + } +} +``` + +### Another Credential Issue Example + +TBD the following credential is based on the W3C Vaccination schema: + +```json +{ + "connection_id": "ad35a4d8-c84b-4a4f-a83f-1afbf134b8b9", + "filter": { + "ld_proof": { + "credential": { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/vaccination/v1" + ], + "type": ["VerifiableCredential", "VaccinationCertificate"], + "issuer": "did:key:zUC71pj2gpDLfcZ9DE1bMtjZGWCSLhkQsUCaKjqXtCftGkz27894pEX9VvGNiFsaV67gqv2TEPQ2aDaDDdTDNp42LfDdK1LaWSBCfzsQEyaiR1zjZm1RtoRu1ZM6v6vz4TiqDgU", + "issuanceDate": "2020-01-01T12:00:00Z", + "credentialSubject": { + "id": "did:key:aksdkajshdkajhsdkjahsdkjahsdj", + "type": "VaccinationEvent", + "batchNumber": "1183738569", + "administeringCentre": "MoH", + "healthProfessional": "MoH", + "countryOfVaccination": "NZ", + "recipient": { + "type": "VaccineRecipient", + "givenName": "JOHN", + "familyName": "SMITH", + "gender": "Male", + "birthDate": "1958-07-17" + }, + "vaccine": { + "type": "Vaccine", + "disease": "COVID-19", + "atcCode": "J07BX03", + "medicinalProductName": "COVID-19 Vaccine Moderna", + "marketingAuthorizationHolder": "Moderna Biotech" + } + } + }, + "options": { + "proofType": "BbsBlsSignature2020" + } + } + } +} +``` From 9fa06c42e84c32fb01237d49a28349985c4e06b9 Mon Sep 17 00:00:00 2001 From: jamshale Date: Fri, 1 Mar 2024 18:45:25 +0000 Subject: [PATCH 52/69] Send revocation list instead of rev_list object - Anoncreds Signed-off-by: jamshale --- aries_cloudagent/anoncreds/revocation.py | 4 +++- .../protocols/revocation_notification/v1_0/routes.py | 4 ++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/aries_cloudagent/anoncreds/revocation.py b/aries_cloudagent/anoncreds/revocation.py index 0854c4a0b4..142731baf1 100644 --- a/aries_cloudagent/anoncreds/revocation.py +++ b/aries_cloudagent/anoncreds/revocation.py @@ -494,7 +494,9 @@ async def store_revocation_registry_list(self, result: RevListResult): if result.revocation_list_state.state == STATE_FINISHED: await self.notify( - RevListFinishedEvent.with_payload(rev_list.rev_reg_def_id, rev_list) + RevListFinishedEvent.with_payload( + rev_list.rev_reg_def_id, rev_list.revocation_list + ) ) except AskarError as err: diff --git a/aries_cloudagent/protocols/revocation_notification/v1_0/routes.py b/aries_cloudagent/protocols/revocation_notification/v1_0/routes.py index a71c95125d..3ab25fec76 100644 --- 
a/aries_cloudagent/protocols/revocation_notification/v1_0/routes.py +++ b/aries_cloudagent/protocols/revocation_notification/v1_0/routes.py @@ -54,6 +54,10 @@ async def on_revocation_published(profile: Profile, event: Event): await responder.send( record.to_message(), connection_id=record.connection_id ) + LOGGER.info( + "Sent revocation notification for credential to %s", + record.connection_id, + ) except StorageNotFoundError: LOGGER.info( From 16dc5f738f8c4bbed454b0281cbd1c6f2c12c2d2 Mon Sep 17 00:00:00 2001 From: tra371 Date: Mon, 5 Feb 2024 23:59:42 +0630 Subject: [PATCH 53/69] feat: add new format and implement VCDICredFormatHandler (Draft) Signed-off-by: tra371 --- aries_cloudagent/indy/models/cred_abstract.py | 160 ++++++- aries_cloudagent/indy/models/cred_request.py | 103 ++++- .../v2_0/formats/vc_di/__init__.py | 0 .../v2_0/formats/vc_di/handler.py | 437 ++++++++++++++++++ .../issue_credential/v2_0/message_types.py | 8 +- .../v2_0/messages/cred_format.py | 9 + .../v2_0/models/detail/vc_di.py | 135 ++++++ 7 files changed, 847 insertions(+), 5 deletions(-) create mode 100644 aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/__init__.py create mode 100644 aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py create mode 100644 aries_cloudagent/protocols/issue_credential/v2_0/models/detail/vc_di.py diff --git a/aries_cloudagent/indy/models/cred_abstract.py b/aries_cloudagent/indy/models/cred_abstract.py index 9abdbd5bb3..c5e46f233b 100644 --- a/aries_cloudagent/indy/models/cred_abstract.py +++ b/aries_cloudagent/indy/models/cred_abstract.py @@ -1,6 +1,7 @@ """Cred abstract artifacts to attach to RFC 453 messages.""" -from typing import Sequence +from typing import Sequence, Union +from ...vc.vc_ld.models.credential import CredentialSchema, VerifiableCredential from marshmallow import EXCLUDE, fields @@ -152,3 +153,160 @@ class Meta: required=True, metadata={"description": "Key correctness proof"}, ) + + +class AnoncredsLinkSecret(BaseModel): + """Anoncreds Link Secret Model.""" + + class Meta: + """AnoncredsLinkSecret metadata.""" + + schema_class = "AnoncredsLinkSecretSchema" + + def __init__( + self, + nonce: str = None, + cred_def_id: str = None, + key_correctness_proof: str = None, + **kwargs, + ): + """Initialize values for AnoncredsLinkSecret.""" + super().__init__(**kwargs) + self.nonce = nonce + self.cred_def_id = cred_def_id + self.key_correctness_proof = key_correctness_proof + + +class AnoncredsLinkSecretSchema(BaseModelSchema): + """Anoncreds Link Secret Schema.""" + + nonce = fields.Str( + required=True, + validate=NUM_STR_WHOLE_VALIDATE, + metadata={ + "description": "Nonce in credential abstract", + "example": NUM_STR_WHOLE_EXAMPLE, + }, + ) + + cred_def_id = fields.Str( + required=True, + validate=INDY_CRED_DEF_ID_VALIDATE, + metadata={ + "description": "Credential definition identifier", + "example": INDY_CRED_DEF_ID_EXAMPLE, + }, + ) + + key_correctness_proof = fields.Nested( + IndyKeyCorrectnessProofSchema(), + required=True, + metadata={"description": "Key correctness proof"}, + ) + + +class DidcommSignedAttachment(BaseModel): + """Didcomm Signed Attachment Model.""" + + class Meta: + """DidcommSignedAttachment metadata.""" + + schema_class = "DidcommSignedAttachmentSchema" + + def __init__( + self, + algs_supported: Sequence[str] = None, + did_methods_supported: Sequence[str] = None, + nonce: str = None, + **kwargs, + ): + """Initialize values for DidcommSignedAttachment.""" + super().__init__(**kwargs) + self.algs_supported = 
algs_supported + self.did_methods_supported = did_methods_supported + self.nonce = nonce + + +class DidcommSignedAttachmentSchema(BaseModelSchema): + """Didcomm Signed Attachment Schema.""" + + algs_supported = fields.List(fields.Str(), required=True) + + did_methods_supported = fields.List(fields.Str(), required=True) + + nonce = fields.Str( + required=True, + validate=NUM_STR_WHOLE_VALIDATE, + metadata={ + "description": "Nonce in credential abstract", + "example": NUM_STR_WHOLE_EXAMPLE, + }, + ) + + +class BindingMethodSchema(BaseModelSchema): + """VCDI Binding Method Schema.""" + + anoncreds_link_secret = fields.Nested(AnoncredsLinkSecretSchema, required=False) + didcomm_signed_attachment = fields.Nested( + DidcommSignedAttachmentSchema, required=True + ) + + +class VCDICredAbstract(BaseModel): + """VCDI Credential Abstract.""" + + class Meta: + """VCDI credential abstract metadata.""" + + schema_class = "VCDICredAbstractSchema" + + def __init__( + self, + data_model_versions_supported: str = None, + binding_required: str = None, + binding_methods: str = None, + credential: Union[dict, VerifiableCredential] = None, + **kwargs, + ): + """Initialize vcdi cred abstract object. + + Args: + data_model_versions_supported: supported versions for data model + binding_required: boolean value + binding_methods: required if binding_required is true + credential: credential object + """ + super().__init__(**kwargs) + self.data_model_versions_supported = data_model_versions_supported + self.binding_required = binding_required + self.binding_methods = binding_methods + self.credential = credential + + +class VCDICredAbstractSchema(BaseModelSchema): + """VCDI Credential Abstract Schema.""" + + class Meta: + """VCDICredAbstractSchema metadata.""" + + model_class = VCDICredAbstract + unknown = EXCLUDE + + data_model_versions_supported = fields.List( + required=True, validate="", metadata={"description": "", "example": ""} + ) + + binding_required = fields.Bool( + required=False, metadata={"description": "", "example": ""} + ) + + binding_method = fields.Nested( + BindingMethodSchema(), + required=binding_required, + metadata={"description": "", "example": ""}, + ) + + credential = fields.Nested( + CredentialSchema(), required=True, metadata={"description": "", "example": ""} + ) diff --git a/aries_cloudagent/indy/models/cred_request.py b/aries_cloudagent/indy/models/cred_request.py index 805cdaa61d..a722170c32 100644 --- a/aries_cloudagent/indy/models/cred_request.py +++ b/aries_cloudagent/indy/models/cred_request.py @@ -1,6 +1,6 @@ """Cred request artifacts to attach to RFC 453 messages.""" -from typing import Mapping +from typing import Mapping, Union from marshmallow import EXCLUDE, fields @@ -79,3 +79,104 @@ class Meta: "example": NUM_STR_WHOLE_EXAMPLE, }, ) + + +class BindingProof(BaseModel): + """Binding proof model.""" + + class Meta: + """VCDI credential request schema metadata.""" + + schema_class = "BindingProofSchema" + + def __init__( + self, + entropy: str = None, + cred_def_id: str = None, + blinded_ms: Mapping = None, + blinded_ms_correctness_proof: Mapping = None, + nonce: str = None, + **kwargs, + ): + """Initialize indy credential request.""" + super().__init__(**kwargs) + self.entropy = entropy + self.cred_def_id = cred_def_id + self.blinded_ms = blinded_ms + self.blinded_ms_correctness_proof = blinded_ms_correctness_proof + self.nonce = nonce + + +class BindingProofSchema(BaseModelSchema): + """VCDI credential request schema.""" + + class Meta: + """VCDI credential request schema 
metadata.""" + + model_class = BindingProof + unknown = EXCLUDE + + entropy = fields.Str( + required=True, + validate=INDY_DID_VALIDATE, + metadata={"description": "Prover DID", "example": INDY_DID_EXAMPLE}, + ) + cred_def_id = fields.Str( + required=True, + validate=INDY_CRED_DEF_ID_VALIDATE, + metadata={ + "description": "Credential definition identifier", + "example": INDY_CRED_DEF_ID_EXAMPLE, + }, + ) + blinded_ms = fields.Dict( + required=True, metadata={"description": "Blinded master secret"} + ) + blinded_ms_correctness_proof = fields.Dict( + required=True, + metadata={"description": "Blinded master secret correctness proof"}, + ) + nonce = fields.Str( + required=True, + validate=NUM_STR_WHOLE_VALIDATE, + metadata={ + "description": "Nonce in credential request", + "example": NUM_STR_WHOLE_EXAMPLE, + }, + ) + + +class VCDICredRequest(BaseModel): + """VCDI credential request model.""" + + class Meta: + """VCDI credential request metadata.""" + + schema_class = "VCDICredRequestSchema" + + def __init__( + self, + data_model_version: str = None, + binding_proof: Union[dict, BindingProof] = None, + **kwargs, + ): + """Initialize values for VCDICredRequest.""" + super().__init__(**kwargs) + self.data_model_version = data_model_version + self.binding_proof = binding_proof + + +class VCDICredRequestSchema(BaseModelSchema): + """VCDI credential request schema.""" + + class Meta: + """VCDI credential request schema metadata.""" + + model_class = VCDICredRequest + unknown = EXCLUDE + + data_model_version = fields.str( + required=True, metadata={"description": "", "example": ""} + ) + + binding_proof = fields.str(required=True, metadata={"description": "", "example": ""}) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/__init__.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py new file mode 100644 index 0000000000..3a7bb4f0f4 --- /dev/null +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py @@ -0,0 +1,437 @@ +"""V2.0 issue-credential indy credential format handler.""" + +import json +import logging +from typing import Mapping, Tuple +from aries_cloudagent.protocols.issue_credential.v2_0.models.detail.vc_di import ( + V20CredExRecordVCDI, +) +from aries_cloudagent.vc.vc_ld.models.credential import VerifiableCredentialSchema + +from marshmallow import RAISE + +from ......anoncreds.revocation import AnonCredsRevocation + +from ......anoncreds.registry import AnonCredsRegistry +from ......anoncreds.holder import AnonCredsHolder, AnonCredsHolderError +from ......anoncreds.issuer import ( + AnonCredsIssuer, +) +from ......indy.models.cred import IndyCredentialSchema +from ......indy.models.cred_abstract import IndyCredAbstractSchema, VCDICredAbstractSchema +from ......indy.models.cred_request import IndyCredRequestSchema, VCDICredRequestSchema +from ......cache.base import BaseCache +from ......ledger.base import BaseLedger +from ......ledger.multiple_ledger.ledger_requests_executor import ( + GET_CRED_DEF, + IndyLedgerRequestsExecutor, +) +from ......messaging.credential_definitions.util import ( + CRED_DEF_SENT_RECORD_TYPE, + CredDefQueryStringSchema, +) +from ......messaging.decorators.attach_decorator import AttachDecorator +from ......multitenant.base import BaseMultitenantManager +from 
......revocation_anoncreds.models.issuer_cred_rev_record import IssuerCredRevRecord +from ......storage.base import BaseStorage +from ...message_types import ( + ATTACHMENT_FORMAT, + CRED_20_ISSUE, + CRED_20_OFFER, + CRED_20_PROPOSAL, + CRED_20_REQUEST, +) +from ...messages.cred_format import V20CredFormat +from ...messages.cred_issue import V20CredIssue +from ...messages.cred_offer import V20CredOffer +from ...messages.cred_proposal import V20CredProposal +from ...messages.cred_request import V20CredRequest +from ...models.cred_ex_record import V20CredExRecord +from ...models.detail.indy import V20CredExRecordIndy +from ..handler import CredFormatAttachment, V20CredFormatError, V20CredFormatHandler + +LOGGER = logging.getLogger(__name__) + + +class VCDICredFormatHandler(V20CredFormatHandler): + """VCDI credential format handler.""" + + format = V20CredFormat.Format.VC_DI + + @classmethod + def validate_fields(cls, message_type: str, attachment_data: Mapping): + """Validate attachment data for a specific message type. + + Uses marshmallow schemas to validate if format specific attachment data + is valid for the specified message type. Only does structural and type + checks, does not validate if .e.g. the issuer value is valid. + + + Args: + message_type (str): The message type to validate the attachment data for. + Should be one of the message types as defined in message_types.py + attachment_data (Mapping): [description] + The attachment data to valide + + Raises: + Exception: When the data is not valid. + + """ + mapping = { + CRED_20_PROPOSAL: CredDefQueryStringSchema, + CRED_20_OFFER: VCDICredAbstractSchema, + CRED_20_REQUEST: VCDICredRequestSchema, + CRED_20_ISSUE: VerifiableCredentialSchema, + } + + # Get schema class + Schema = mapping[message_type] + + # Validate, throw if not valid + Schema(unknown=RAISE).load(attachment_data) + + async def get_detail_record(self, cred_ex_id: str) -> V20CredExRecordVCDI: + """Retrieve credential exchange detail record by cred_ex_id.""" + + async with self.profile.session() as session: + records = await VCDICredFormatHandler.format.detail.query_by_cred_ex_id( + session, cred_ex_id + ) + + if len(records) > 1: + LOGGER.warning( + "Cred ex id %s has %d %s detail records: should be 1", + cred_ex_id, + len(records), + VCDICredFormatHandler.format.api, + ) + return records[0] if records else None + + async def _check_uniqueness(self, cred_ex_id: str): + """Raise exception on evidence that cred ex already has cred issued to it.""" + async with self.profile.session() as session: + exist = await VCDICredFormatHandler.format.detail.query_by_cred_ex_id( + session, cred_ex_id + ) + if exist: + raise V20CredFormatError( + f"{VCDICredFormatHandler.format.api} detail record already " + f"exists for cred ex id {cred_ex_id}" + ) + + def get_format_identifier(self, message_type: str) -> str: + """Get attachment format identifier for format and message combination. + + Args: + message_type (str): Message type for which to return the format identifier + + Returns: + str: Issue credential attachment format identifier + + """ + return ATTACHMENT_FORMAT[message_type][VCDICredFormatHandler.format.api] + + def get_format_data(self, message_type: str, data: dict) -> CredFormatAttachment: + """Get credential format and attachment objects for use in cred ex messages. + + Returns a tuple of both credential format and attachment decorator for use + in credential exchange messages. It looks up the correct format identifier and + encodes the data as a base64 attachment. 
+ + Args: + message_type (str): The message type for which to return the cred format. + Should be one of the message types defined in the message types file + data (dict): The data to include in the attach decorator + + Returns: + CredFormatAttachment: Credential format and attachment data objects + + """ + return ( + V20CredFormat( + attach_id=VCDICredFormatHandler.format.api, + format_=self.get_format_identifier(message_type), + ), + AttachDecorator.data_base64(data, ident=VCDICredFormatHandler.format.api), + ) + + async def _match_sent_cred_def_id(self, tag_query: Mapping[str, str]) -> str: + """Return most recent matching id of cred def that agent sent to ledger.""" + + async with self.profile.session() as session: + storage = session.inject(BaseStorage) + found = await storage.find_all_records( + type_filter=CRED_DEF_SENT_RECORD_TYPE, tag_query=tag_query + ) + if not found: + raise V20CredFormatError( + f"Issuer has no operable cred def for proposal spec {tag_query}" + ) + return max(found, key=lambda r: int(r.tags["epoch"])).tags["cred_def_id"] + + async def create_proposal( + self, cred_ex_record: V20CredExRecord, proposal_data: Mapping[str, str] + ) -> Tuple[V20CredFormat, AttachDecorator]: + """Create indy credential proposal.""" + if proposal_data is None: + proposal_data = {} + + return self.get_format_data(CRED_20_PROPOSAL, proposal_data) + + async def receive_proposal( + self, cred_ex_record: V20CredExRecord, cred_proposal_message: V20CredProposal + ) -> None: + """Receive vcdi credential proposal. + + No custom handling is required for this step. + """ + + async def create_offer( + self, cred_proposal_message: V20CredProposal + ) -> CredFormatAttachment: + """Create vcdi credential offer.""" + + issuer = AnonCredsIssuer(self.profile) + ledger = self.profile.inject(BaseLedger) + cache = self.profile.inject_or(BaseCache) + + cred_def_id = await issuer.match_created_credential_definitions( + **cred_proposal_message.attachment(VCDICredFormatHandler.format) + ) + + async def _create(): + # TODO - implement a separate create_credential_offer for vcdi + offer_json = await issuer.create_credential_offer(cred_def_id) + return json.loads(offer_json) + + multitenant_mgr = self.profile.inject_or(BaseMultitenantManager) + if multitenant_mgr: + ledger_exec_inst = IndyLedgerRequestsExecutor(self.profile) + else: + ledger_exec_inst = self.profile.inject(IndyLedgerRequestsExecutor) + ledger = ( + await ledger_exec_inst.get_ledger_for_identifier( + cred_def_id, + txn_record_type=GET_CRED_DEF, + ) + )[1] + async with ledger: + schema_id = await ledger.credential_definition_id2schema_id(cred_def_id) + schema = await ledger.get_schema(schema_id) + schema_attrs = set(schema["attrNames"]) + preview_attrs = set(cred_proposal_message.credential_preview.attr_dict()) + if preview_attrs != schema_attrs: + raise V20CredFormatError( + f"Preview attributes {preview_attrs} " + f"mismatch corresponding schema attributes {schema_attrs}" + ) + + cred_offer = None + cache_key = f"credential_offer::{cred_def_id}" + + if cache: + async with cache.acquire(cache_key) as entry: + if entry.result: + cred_offer = entry.result + else: + cred_offer = await _create() + await entry.set_result(cred_offer, 3600) + if not cred_offer: + cred_offer = await _create() + + return self.get_format_data(CRED_20_OFFER, cred_offer) + + async def receive_offer( + self, cred_ex_record: V20CredExRecord, cred_offer_message: V20CredOffer + ) -> None: + """Receive indy credential offer.""" + + async def create_request( + self, 
cred_ex_record: V20CredExRecord, request_data: Mapping = None + ) -> CredFormatAttachment: + """Create indy credential request.""" + if cred_ex_record.state != V20CredExRecord.STATE_OFFER_RECEIVED: + raise V20CredFormatError( + "Indy issue credential format cannot start from credential request" + ) + + await self._check_uniqueness(cred_ex_record.cred_ex_id) + + holder_did = request_data.get("holder_did") if request_data else None + cred_offer = cred_ex_record.cred_offer.attachment(VCDICredFormatHandler.format) + + if "nonce" not in cred_offer: + raise V20CredFormatError("Missing nonce in credential offer") + + nonce = cred_offer["nonce"] + cred_def_id = cred_offer["cred_def_id"] + + async def _create(): + anoncreds_registry = self.profile.inject(AnonCredsRegistry) + + cred_def_result = await anoncreds_registry.get_credential_definition( + self.profile, cred_def_id + ) + + holder = AnonCredsHolder(self.profile) + request_json, metadata_json = await holder.create_credential_request( + cred_offer, cred_def_result.credential_definition, holder_did + ) + + return { + "request": json.loads(request_json), + "metadata": json.loads(metadata_json), + } + + cache_key = f"credential_request::{cred_def_id}::{holder_did}::{nonce}" + cred_req_result = None + cache = self.profile.inject_or(BaseCache) + if cache: + async with cache.acquire(cache_key) as entry: + if entry.result: + cred_req_result = entry.result + else: + cred_req_result = await _create() + await entry.set_result(cred_req_result, 3600) + if not cred_req_result: + cred_req_result = await _create() + + detail_record = V20CredExRecordVCDI( + cred_ex_id=cred_ex_record.cred_ex_id, + cred_request_metadata=cred_req_result["metadata"], + ) + + async with self.profile.session() as session: + await detail_record.save(session, reason="create v2.0 credential request") + + return self.get_format_data(CRED_20_REQUEST, cred_req_result["request"]) + + async def receive_request( + self, cred_ex_record: V20CredExRecord, cred_request_message: V20CredRequest + ) -> None: + """Receive indy credential request.""" + if not cred_ex_record.cred_offer: + raise V20CredFormatError( + "Indy issue credential format cannot start from credential request" + ) + + async def issue_credential( + self, cred_ex_record: V20CredExRecord, retries: int = 5 + ) -> CredFormatAttachment: + """Issue indy credential.""" + await self._check_uniqueness(cred_ex_record.cred_ex_id) + + cred_offer = cred_ex_record.cred_offer.attachment(VCDICredFormatHandler.format) + cred_request = cred_ex_record.cred_request.attachment( + VCDICredFormatHandler.format + ) + cred_values = cred_ex_record.cred_offer.credential_preview.attr_dict(decode=False) + + issuer = AnonCredsIssuer(self.profile) + cred_def_id = cred_offer["cred_def_id"] + if await issuer.cred_def_supports_revocation(cred_def_id): + revocation = AnonCredsRevocation(self.profile) + cred_json, cred_rev_id, rev_reg_def_id = await revocation.create_credential( + cred_offer, cred_request, cred_values + ) + else: + # TODO - implement a separate create_credential for vcdi + cred_json = await issuer.create_credential( + cred_offer, cred_request, cred_values + ) + cred_rev_id = None + rev_reg_def_id = None + + result = self.get_format_data(CRED_20_ISSUE, json.loads(cred_json)) + + async with self._profile.transaction() as txn: + detail_record = V20CredExRecordIndy( + cred_ex_id=cred_ex_record.cred_ex_id, + rev_reg_id=rev_reg_def_id, + cred_rev_id=cred_rev_id, + ) + await detail_record.save(txn, reason="v2.0 issue credential") + + if cred_rev_id: + 
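+                # Descriptive note: when a credential revocation id was produced, the
+                # handler records the revocation registry id and credential revocation id
+                # in an IssuerCredRevRecord (same transaction as the detail record) so the
+                # issuer can later revoke this credential.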
issuer_cr_rec = IssuerCredRevRecord( + state=IssuerCredRevRecord.STATE_ISSUED, + cred_ex_id=cred_ex_record.cred_ex_id, + cred_ex_version=IssuerCredRevRecord.VERSION_2, + rev_reg_id=rev_reg_def_id, + cred_rev_id=cred_rev_id, + ) + await issuer_cr_rec.save( + txn, + reason=( + "Created issuer cred rev record for " + f"rev reg id {rev_reg_def_id}, index {cred_rev_id}" + ), + ) + await txn.commit() + + return result + + async def receive_credential( + self, cred_ex_record: V20CredExRecord, cred_issue_message: V20CredIssue + ) -> None: + """Receive indy credential. + + Validation is done in the store credential step. + """ + + async def store_credential( + self, cred_ex_record: V20CredExRecord, cred_id: str = None + ) -> None: + """Store indy credential.""" + cred = cred_ex_record.cred_issue.attachment(VCDICredFormatHandler.format) + + rev_reg_def = None + anoncreds_registry = self.profile.inject(AnonCredsRegistry) + cred_def_result = await anoncreds_registry.get_credential_definition( + self.profile, cred["cred_def_id"] + ) + if cred.get("rev_reg_id"): + rev_reg_def_result = ( + await anoncreds_registry.get_revocation_registry_definition( + self.profile, cred["rev_reg_id"] + ) + ) + rev_reg_def = rev_reg_def_result.revocation_registry + + holder = AnonCredsHolder(self.profile) + cred_offer_message = cred_ex_record.cred_offer + mime_types = None + if cred_offer_message and cred_offer_message.credential_preview: + mime_types = cred_offer_message.credential_preview.mime_types() or None + + if rev_reg_def: + revocation = AnonCredsRevocation(self.profile) + await revocation.get_or_fetch_local_tails_path(rev_reg_def) + try: + detail_record = await self.get_detail_record(cred_ex_record.cred_ex_id) + if detail_record is None: + raise V20CredFormatError( + f"No credential exchange {VCDICredFormatHandler.format.aries} " + f"detail record found for cred ex id {cred_ex_record.cred_ex_id}" + ) + cred_id_stored = await holder.store_credential( + cred_def_result.credential_definition.serialize(), + cred, + detail_record.cred_request_metadata, + mime_types, + credential_id=cred_id, + rev_reg_def=rev_reg_def.serialize() if rev_reg_def else None, + ) + + detail_record.cred_id_stored = cred_id_stored + detail_record.rev_reg_id = cred.get("rev_reg_id", None) + detail_record.cred_rev_id = cred.get("cred_rev_id", None) + + async with self.profile.session() as session: + # Store detail record, emit event + await detail_record.save( + session, reason="store credential v2.0", event=True + ) + except AnonCredsHolderError as e: + LOGGER.error(f"Error storing credential: {e.error_code} - {e.message}") + raise e diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py b/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py index 33c7241ae1..b2b9e4325e 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py @@ -22,9 +22,7 @@ MESSAGE_TYPES = DIDCommPrefix.qualify_all( { - CRED_20_PROPOSAL: ( - f"{PROTOCOL_PACKAGE}.messages.cred_proposal.V20CredProposal" - ), + CRED_20_PROPOSAL: (f"{PROTOCOL_PACKAGE}.messages.cred_proposal.V20CredProposal"), CRED_20_OFFER: f"{PROTOCOL_PACKAGE}.messages.cred_offer.V20CredOffer", CRED_20_REQUEST: f"{PROTOCOL_PACKAGE}.messages.cred_request.V20CredRequest", CRED_20_ISSUE: f"{PROTOCOL_PACKAGE}.messages.cred_issue.V20CredIssue", @@ -43,18 +41,22 @@ CRED_20_PROPOSAL: { V20CredFormat.Format.INDY.api: "hlindy/cred-filter@v2.0", V20CredFormat.Format.LD_PROOF.api: 
"aries/ld-proof-vc-detail@v1.0", + V20CredFormat.Format.VC_DI.api: "hlindy/cred-filter@v2.0", }, CRED_20_OFFER: { V20CredFormat.Format.INDY.api: "hlindy/cred-abstract@v2.0", V20CredFormat.Format.LD_PROOF.api: "aries/ld-proof-vc-detail@v1.0", + V20CredFormat.Format.VC_DI.api: "didcomm/w3c-di-vc-offer@v0.1", }, CRED_20_REQUEST: { V20CredFormat.Format.INDY.api: "hlindy/cred-req@v2.0", V20CredFormat.Format.LD_PROOF.api: "aries/ld-proof-vc-detail@v1.0", + V20CredFormat.Format.VC_DI.api: "didcomm/w3c-di-vc-request@v0.1", }, CRED_20_ISSUE: { V20CredFormat.Format.INDY.api: "hlindy/cred@v2.0", V20CredFormat.Format.LD_PROOF.api: "aries/ld-proof-vc@v1.0", + V20CredFormat.Format.VC_DI.api: "didcomm/w3c-di-vc@v0.1", }, } diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_format.py b/aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_format.py index 374a188a3a..0974ac2f38 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_format.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_format.py @@ -13,6 +13,7 @@ from .....utils.classloader import DeferLoad from ..models.detail.indy import V20CredExRecordIndy from ..models.detail.ld_proof import V20CredExRecordLDProof +from ..models.detail.vc_di import V20CredExRecordVCDI if TYPE_CHECKING: from ..formats.handler import V20CredFormatHandler @@ -60,6 +61,14 @@ class Format(Enum): ".formats.ld_proof.handler.LDProofCredFormatHandler" ), ) + VC_DI = FormatSpec( + "didcomm/", + V20CredExRecordVCDI, + DeferLoad( + "aries_cloudagent.protocols.issue_credential.v2_0" + ".formats.vc_di.handler.VCDICredFormatHandler" + ), + ) @classmethod def get(cls, label: Union[str, "V20CredFormat.Format"]): diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/models/detail/vc_di.py b/aries_cloudagent/protocols/issue_credential/v2_0/models/detail/vc_di.py new file mode 100644 index 0000000000..5020791f87 --- /dev/null +++ b/aries_cloudagent/protocols/issue_credential/v2_0/models/detail/vc_di.py @@ -0,0 +1,135 @@ +"""Indy-specific credential exchange information with non-secrets storage.""" + +from typing import Any, Mapping, Sequence + +from marshmallow import EXCLUDE, fields + +from ......core.profile import ProfileSession +from ......messaging.models.base_record import BaseRecord, BaseRecordSchema +from ......messaging.valid import ( + INDY_CRED_REV_ID_EXAMPLE, + INDY_CRED_REV_ID_VALIDATE, + INDY_REV_REG_ID_EXAMPLE, + INDY_REV_REG_ID_VALIDATE, + UUID4_EXAMPLE, +) +from .. 
import UNENCRYPTED_TAGS + + +class V20CredExRecordVCDI(BaseRecord): + """Credential exchange vc_di detail record.""" + + class Meta: + """V20CredExRecordIndy metadata.""" + + schema_class = "V20CredExRecordVCDISchema" + + RECORD_ID_NAME = "cred_ex_vc_di_id" + RECORD_TYPE = "vc_di_cred_ex_v20" + TAG_NAMES = {"~cred_ex_id"} if UNENCRYPTED_TAGS else {"cred_ex_id"} + RECORD_TOPIC = "issue_credential_v2_0_vc_di" + + def __init__( + self, + cred_ex_vc_di_id: str = None, + *, + cred_ex_id: str = None, + cred_id_stored: str = None, + cred_request_metadata: Mapping = None, + rev_reg_id: str = None, + cred_rev_id: str = None, + **kwargs, + ): + """Initialize indy credential exchange record details.""" + super().__init__(cred_ex_vc_di_id, **kwargs) + + self.cred_ex_id = cred_ex_id + self.cred_id_stored = cred_id_stored + self.cred_request_metadata = cred_request_metadata + self.rev_reg_id = rev_reg_id + self.cred_rev_id = cred_rev_id + + @property + def cred_ex_vc_di_id(self) -> str: + """Accessor for the ID associated with this exchange.""" + return self._id + + @property + def record_value(self) -> dict: + """Accessor for the JSON record value generated for this credential exchange.""" + return { + prop: getattr(self, prop) + for prop in ( + "cred_id_stored", + "cred_request_metadata", + "rev_reg_id", + "cred_rev_id", + ) + } + + @classmethod + async def query_by_cred_ex_id( + cls, + session: ProfileSession, + cred_ex_id: str, + ) -> Sequence["V20CredExRecordVCDI"]: + """Retrieve credential exchange indy detail record(s) by its cred ex id.""" + return await cls.query( + session=session, + tag_filter={"cred_ex_id": cred_ex_id}, + ) + + def __eq__(self, other: Any) -> bool: + """Comparison between records.""" + return super().__eq__(other) + + +class V20CredExRecordVCDISchema(BaseRecordSchema): + """Credential exchange indy detail record detail schema.""" + + class Meta: + """Credential exchange indy detail record schema metadata.""" + + model_class = V20CredExRecordVCDI + unknown = EXCLUDE + + cred_ex_indy_id = fields.Str( + required=False, + metadata={"description": "Record identifier", "example": UUID4_EXAMPLE}, + ) + cred_ex_id = fields.Str( + required=False, + metadata={ + "description": "Corresponding v2.0 credential exchange record identifier", + "example": UUID4_EXAMPLE, + }, + ) + cred_id_stored = fields.Str( + required=False, + metadata={ + "description": "Credential identifier stored in wallet", + "example": UUID4_EXAMPLE, + }, + ) + cred_request_metadata = fields.Dict( + required=False, + metadata={"description": "Credential request metadata for indy holder"}, + ) + rev_reg_id = fields.Str( + required=False, + validate=INDY_REV_REG_ID_VALIDATE, + metadata={ + "description": "Revocation registry identifier", + "example": INDY_REV_REG_ID_EXAMPLE, + }, + ) + cred_rev_id = fields.Str( + required=False, + validate=INDY_CRED_REV_ID_VALIDATE, + metadata={ + "description": ( + "Credential revocation identifier within revocation registry" + ), + "example": INDY_CRED_REV_ID_EXAMPLE, + }, + ) From f33e857b381cac4024b8ea845489a165c7224e37 Mon Sep 17 00:00:00 2001 From: tra371 Date: Thu, 15 Feb 2024 00:01:14 +0630 Subject: [PATCH 54/69] feat: implement VCDICredFormatHandler Signed-off-by: tra371 --- aries_cloudagent/anoncreds/issuer.py | 58 +++++++++ aries_cloudagent/indy/models/cred_request.py | 85 +++++++++++- .../v2_0/formats/vc_di/handler.py | 82 ++++++++---- demo/runners/faber.py | 121 +++++++++++++++++- demo/runners/support/agent.py | 57 +++------ 5 files changed, 328 insertions(+), 75 
deletions(-) diff --git a/aries_cloudagent/anoncreds/issuer.py b/aries_cloudagent/anoncreds/issuer.py index 312bd8400f..eea459a0ca 100644 --- a/aries_cloudagent/anoncreds/issuer.py +++ b/aries_cloudagent/anoncreds/issuer.py @@ -632,3 +632,61 @@ async def create_credential( raise AnonCredsIssuerError("Error creating credential") from err return credential.to_json() + + +async def create_credential_vc_di( + self, + credential_offer: dict, + credential_request: dict, + credential_values: dict, +) -> str: + """Create Credential.""" + anoncreds_registry = self.profile.inject(AnonCredsRegistry) + schema_id = credential_offer["schema_id"] + schema_result = await anoncreds_registry.get_schema(self.profile, schema_id) + cred_def_id = credential_offer["cred_def_id"] + schema_attributes = schema_result.schema_value.attr_names + + try: + async with self.profile.session() as session: + cred_def = await session.handle.fetch(CATEGORY_CRED_DEF, cred_def_id) + cred_def_private = await session.handle.fetch( + CATEGORY_CRED_DEF_PRIVATE, cred_def_id + ) + except AskarError as err: + raise AnonCredsIssuerError("Error retrieving credential definition") from err + + if not cred_def or not cred_def_private: + raise AnonCredsIssuerError( + "Credential definition not found for credential issuance" + ) + + raw_values = {} + for attribute in schema_attributes: + # Ensure every attribute present in schema to be set. + # Extraneous attribute names are ignored. + try: + credential_value = credential_values[attribute] + except KeyError: + raise AnonCredsIssuerError( + "Provided credential values are missing a value " + f"for the schema attribute '{attribute}'" + ) + + raw_values[attribute] = str(credential_value) + + try: + credential = await asyncio.get_event_loop().run_in_executor( + None, + lambda: Credential.create( + cred_def.raw_value, + cred_def_private.raw_value, + credential_offer, + credential_request, + raw_values, + ), + ) + except AnoncredsError as err: + raise AnonCredsIssuerError("Error creating credential") from err + + return credential.to_json() diff --git a/aries_cloudagent/indy/models/cred_request.py b/aries_cloudagent/indy/models/cred_request.py index a722170c32..636e2b0f72 100644 --- a/aries_cloudagent/indy/models/cred_request.py +++ b/aries_cloudagent/indy/models/cred_request.py @@ -81,13 +81,14 @@ class Meta: ) -class BindingProof(BaseModel): +class AnoncredsLinkSecret(BaseModel): """Binding proof model.""" class Meta: """VCDI credential request schema metadata.""" schema_class = "BindingProofSchema" + unknown = EXCLUDE def __init__( self, @@ -107,13 +108,13 @@ def __init__( self.nonce = nonce -class BindingProofSchema(BaseModelSchema): +class AnoncredsLinkSecretSchema(BaseModelSchema): """VCDI credential request schema.""" class Meta: """VCDI credential request schema metadata.""" - model_class = BindingProof + model_class = AnoncredsLinkSecret unknown = EXCLUDE entropy = fields.Str( @@ -146,6 +147,76 @@ class Meta: ) +class DidcommSignedAttachment(BaseModel): + """Didcomm Signed Attachment Model.""" + + class Meta: + """Didcomm signed attachment metadata.""" + + schema_class = "DidcommSignedAttachmentSchema" + unknown = EXCLUDE + + def __init__(self, attachment_id: str = None, **kwargs): + """Initialize DidcommSignedAttachment.""" + super().__init__(**kwargs) + self.attachment_id = attachment_id + + +class DidcommSignedAttachmentSchema(BaseModelSchema): + """Didcomm Signed Attachment Schema.""" + + class Meta: + """Didcomm Signed Attachment schema metadata.""" + + model_class = 
DidcommSignedAttachment + + attachment_id = fields.str( + required=True, metadata={"description": "", "example": ""} + ) + + +class BindingProof(BaseModel): + """Binding Proof Model.""" + + class Meta: + """Binding proof metadata.""" + + schema_class = "BindingProofSchema" + unknown = EXCLUDE + + def __init__( + self, + anoncreds_link_secret: str = None, + didcomm_signed_attachment: str = None, + **kwargs, + ): + """Initialize binding proof.""" + super().__init__(**kwargs) + self.anoncreds_link_secret = anoncreds_link_secret + self.didcomm_signed_attachment = didcomm_signed_attachment + + +class BindingProofSchema(BaseModelSchema): + """Binding Proof Schema.""" + + class Meta: + """Binding proof schema metadata.""" + + model_class = BindingProof + + anoncreds_link_secret = fields.Nested( + AnoncredsLinkSecretSchema(), + required=True, + metadata={"description": "", "example": ""}, + ) + + didcomm_signed_attachment = fields.Nested( + DidcommSignedAttachmentSchema(), + required=True, + metadata={"description": "", "example": ""}, + ) + + class VCDICredRequest(BaseModel): """VCDI credential request model.""" @@ -175,8 +246,12 @@ class Meta: model_class = VCDICredRequest unknown = EXCLUDE - data_model_version = fields.str( + data_model_version = fields.Str( required=True, metadata={"description": "", "example": ""} ) - binding_proof = fields.str(required=True, metadata={"description": "", "example": ""}) + binding_proof = fields.Nested( + BindingProofSchema(), + required=True, + metadata={"description": "", "example": ""}, + ) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py index 3a7bb4f0f4..7f37cd8541 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py @@ -3,10 +3,18 @@ import json import logging from typing import Mapping, Tuple +from aries_cloudagent.protocols.issue_credential.v2_0.manager import ( + V20CredManager, + V20CredManagerError, +) from aries_cloudagent.protocols.issue_credential.v2_0.models.detail.vc_di import ( V20CredExRecordVCDI, ) -from aries_cloudagent.vc.vc_ld.models.credential import VerifiableCredentialSchema +from aries_cloudagent.vc.vc_ld.manager import VcLdpManager, VcLdpManagerError +from aries_cloudagent.vc.vc_ld.models.credential import ( + VerifiableCredential, + VerifiableCredentialSchema, +) from marshmallow import RAISE @@ -18,8 +26,17 @@ AnonCredsIssuer, ) from ......indy.models.cred import IndyCredentialSchema -from ......indy.models.cred_abstract import IndyCredAbstractSchema, VCDICredAbstractSchema -from ......indy.models.cred_request import IndyCredRequestSchema, VCDICredRequestSchema +from ......indy.models.cred_abstract import ( + IndyCredAbstractSchema, + VCDICredAbstract, + VCDICredAbstractSchema, +) +from ......indy.models.cred_request import ( + BindingProof, + IndyCredRequestSchema, + VCDICredRequest, + VCDICredRequestSchema, +) from ......cache.base import BaseCache from ......ledger.base import BaseLedger from ......ledger.multiple_ledger.ledger_requests_executor import ( @@ -47,7 +64,6 @@ from ...messages.cred_proposal import V20CredProposal from ...messages.cred_request import V20CredRequest from ...models.cred_ex_record import V20CredExRecord -from ...models.detail.indy import V20CredExRecordIndy from ..handler import CredFormatAttachment, V20CredFormatError, V20CredFormatHandler LOGGER = logging.getLogger(__name__) @@ 
-260,11 +276,18 @@ async def create_request( holder_did = request_data.get("holder_did") if request_data else None cred_offer = cred_ex_record.cred_offer.attachment(VCDICredFormatHandler.format) - if "nonce" not in cred_offer: - raise V20CredFormatError("Missing nonce in credential offer") + if ( + "anoncreds_link_secret" in cred_offer["binding_method"] + and "nonce" not in cred_offer["binding_method"]["anoncreds_link_secret"] + ): + raise V20CredFormatError( + "Missing nonce in credential offer with anoncreds link secret binding method" + ) - nonce = cred_offer["nonce"] - cred_def_id = cred_offer["cred_def_id"] + nonce = cred_offer["binding_method"]["anoncreds_link_secret"]["nonce"] + cred_def_id = cred_offer["binding_method"]["anoncreds_link_secret"][ + "cred_def_id" + ] async def _create(): anoncreds_registry = self.profile.inject(AnonCredsRegistry) @@ -321,31 +344,36 @@ async def issue_credential( """Issue indy credential.""" await self._check_uniqueness(cred_ex_record.cred_ex_id) - cred_offer = cred_ex_record.cred_offer.attachment(VCDICredFormatHandler.format) - cred_request = cred_ex_record.cred_request.attachment( + attached_credential = cred_ex_record.cred_offer.attachment( VCDICredFormatHandler.format ) - cred_values = cred_ex_record.cred_offer.credential_preview.attr_dict(decode=False) - - issuer = AnonCredsIssuer(self.profile) - cred_def_id = cred_offer["cred_def_id"] - if await issuer.cred_def_supports_revocation(cred_def_id): - revocation = AnonCredsRevocation(self.profile) - cred_json, cred_rev_id, rev_reg_def_id = await revocation.create_credential( - cred_offer, cred_request, cred_values - ) - else: - # TODO - implement a separate create_credential for vcdi - cred_json = await issuer.create_credential( - cred_offer, cred_request, cred_values + detail_credential = VCDICredAbstract.deserialize(attached_credential) + binding_proof = cred_ex_record.cred_request.attachment( + VCDICredFormatHandler.format + ) + detail_proof = VCDICredRequest.deserialize(binding_proof) + manager = self.profile.inject(VcLdpManager) + # TODO - implement a separate create_credential for vcdi + assert detail_credential.credential and isinstance( + detail_credential.credential, VerifiableCredential + ) + assert detail_proof.binding_proof and isinstance( + detail_proof.binding_proof, BindingProof + ) + try: + vc = await manager.issue( + detail_credential.credential, detail_proof.binding_proof ) - cred_rev_id = None - rev_reg_def_id = None + except VcLdpManagerError as err: + raise V20CredFormatError("Failed to issue credential") from err + + result = self.get_format_data(CRED_20_ISSUE, vc.serialize()) - result = self.get_format_data(CRED_20_ISSUE, json.loads(cred_json)) + cred_rev_id = None + rev_reg_def_id = None async with self._profile.transaction() as txn: - detail_record = V20CredExRecordIndy( + detail_record = V20CredExRecordVCDI( cred_ex_id=cred_ex_record.cred_ex_id, rev_reg_id=rev_reg_def_id, cred_rev_id=cred_rev_id, diff --git a/demo/runners/faber.py b/demo/runners/faber.py index d8b9e4347f..be5a5b0b3c 100644 --- a/demo/runners/faber.py +++ b/demo/runners/faber.py @@ -19,6 +19,7 @@ from runners.support.agent import ( # noqa:E402 CRED_FORMAT_INDY, CRED_FORMAT_JSON_LD, + CRED_FORMAT_VC_DI, SIG_TYPE_BLS, ) from runners.support.utils import ( # noqa:E402 @@ -175,6 +176,31 @@ def generate_credential_offer(self, aip, cred_type, cred_def_id, exchange_tracin } return offer_request + elif cred_type == CRED_FORMAT_VC_DI: + self.cred_attrs[cred_def_id] = { + "name": "Alice Smith", + "date": "2018-05-28", 
+ "degree": "Maths", + "birthdate_dateint": birth_date.strftime(birth_date_format), + "timestamp": str(int(time.time())), + } + + cred_preview = { + "@type": CRED_PREVIEW_TYPE, + "attributes": [ + {"name": n, "value": v} + for (n, v) in self.cred_attrs[cred_def_id].items() + ], + } + offer_request = { + "connection_id": self.connection_id, + "comment": f"Offer on cred def id {cred_def_id}", + "auto_remove": False, + "credential_preview": cred_preview, + "filter": {"vc_di": {"cred_def_id": cred_def_id}}, + "trace": exchange_tracing, + } + return offer_request else: raise Exception(f"Error invalid credential type: {self.cred_type}") @@ -314,6 +340,70 @@ def generate_proof_request_web_request( proof_request_web_request["connection_id"] = self.connection_id return proof_request_web_request + elif cred_type == CRED_FORMAT_VC_DI: + req_attrs = [ + { + "name": "name", + "restrictions": [{"schema_name": "degree schema"}], + }, + { + "name": "date", + "restrictions": [{"schema_name": "degree schema"}], + }, + ] + if revocation: + req_attrs.append( + { + "name": "degree", + "restrictions": [{"schema_name": "degree schema"}], + "non_revoked": {"to": int(time.time() - 1)}, + }, + ) + else: + req_attrs.append( + { + "name": "degree", + "restrictions": [{"schema_name": "degree schema"}], + } + ) + if SELF_ATTESTED: + # test self-attested claims + req_attrs.append( + {"name": "self_attested_thing"}, + ) + req_preds = [ + # test zero-knowledge proofs + { + "name": "birthdate_dateint", + "p_type": "<=", + "p_value": int(birth_date.strftime(birth_date_format)), + "restrictions": [{"schema_name": "degree schema"}], + } + ] + + vc_di_proof_request = { + "name": "Proof of Education", + "version": "1.0", + "requested_attributes": { + f"0_{req_attr['name']}_uuid": req_attr for req_attr in req_attrs + }, + "requested_predicates": { + f"0_{req_pred['name']}_GE_uuid": req_pred + for req_pred in req_preds + }, + } + + if revocation: + vc_di_proof_request["non_revoked"] = {"to": int(time.time())} + + proof_request_web_request = { + "presentation_request": {"vc_di": vc_di_proof_request}, + "trace": exchange_tracing, + } + if not connectionless: + proof_request_web_request["connection_id"] = self.connection_id + return proof_request_web_request + elif cred_type == CRED_FORMAT_JSON_LD: proof_request_web_request = { "comment": "test proof request for json-ld", @@ -445,7 +535,7 @@ async def main(args): else False ), ) - elif faber_agent.cred_type == CRED_FORMAT_JSON_LD: + elif faber_agent.cred_type == (CRED_FORMAT_JSON_LD or CRED_FORMAT_VC_DI): faber_agent.public_did = True await faber_agent.initialize(the_agent=agent) else: @@ -562,6 +652,14 @@ async def main(args): exchange_tracing, ) + elif faber_agent.cred_type == CRED_FORMAT_VC_DI: + offer_request = faber_agent.agent.generate_credential_offer( + faber_agent.aip, + faber_agent.cred_type, + faber_agent.cred_def_id, + exchange_tracing, + ) + else: raise Exception( f"Error invalid credential type: {faber_agent.cred_type}" @@ -611,6 +709,16 @@ async def main(args): ) ) + elif faber_agent.cred_type == CRED_FORMAT_VC_DI: + proof_request_web_request = ( + faber_agent.agent.generate_proof_request_web_request( + faber_agent.aip, + faber_agent.cred_type, + faber_agent.revocation, + exchange_tracing, + ) + ) + else: raise Exception( "Error invalid credential type:" + faber_agent.cred_type @@ -678,6 +786,17 @@ async def main(args): connectionless=True, ) ) + + elif faber_agent.cred_type == CRED_FORMAT_VC_DI: + proof_request_web_request = ( + 
faber_agent.agent.generate_proof_request_web_request( + faber_agent.aip, + faber_agent.cred_type, + faber_agent.revocation, + exchange_tracing, + connectionless=True, + ) + ) else: raise Exception( "Error invalid credential type:" + faber_agent.cred_type diff --git a/demo/runners/support/agent.py b/demo/runners/support/agent.py index a114ef00a3..0dc2a2f1d2 100644 --- a/demo/runners/support/agent.py +++ b/demo/runners/support/agent.py @@ -72,6 +72,7 @@ CRED_FORMAT_INDY = "indy" CRED_FORMAT_JSON_LD = "json-ld" +CRED_FORMAT_VC_DI = "vc_di" DID_METHOD_SOV = "sov" DID_METHOD_KEY = "key" KEY_TYPE_ED255 = "ed25519" @@ -369,9 +370,7 @@ async def register_schema_and_creddef_indy( log_msg("Schema ID:", schema_id) # Create a cred def for the schema - cred_def_tag = ( - tag if tag else (self.ident + "." + schema_name).replace(" ", "_") - ) + cred_def_tag = tag if tag else (self.ident + "." + schema_name).replace(" ", "_") credential_definition_body = { "schema_id": schema_id, "support_revocation": support_revocation, @@ -401,9 +400,7 @@ async def register_schema_and_creddef_indy( credential_definition_response = await self.admin_GET( "/credential-definitions/created" ) - if 0 == len( - credential_definition_response["credential_definition_ids"] - ): + if 0 == len(credential_definition_response["credential_definition_ids"]): await asyncio.sleep(1.0) attempts = attempts - 1 credential_definition_id = credential_definition_response[ @@ -450,9 +447,7 @@ async def register_schema_and_creddef_anoncreds( log_msg("Schema ID:", schema_id) # Create a cred def for the schema - cred_def_tag = ( - tag if tag else (self.ident + "." + schema_name).replace(" ", "_") - ) + cred_def_tag = tag if tag else (self.ident + "." + schema_name).replace(" ", "_") max_cred_num = revocation_registry_size if revocation_registry_size else 0 credential_definition_body = { "credential_definition": { @@ -488,9 +483,7 @@ async def register_schema_and_creddef_anoncreds( credential_definition_response = await self.admin_GET( "/anoncreds/credential-definitions" ) - if 0 == len( - credential_definition_response["credential_definition_ids"] - ): + if 0 == len(credential_definition_response["credential_definition_ids"]): await asyncio.sleep(1.0) attempts = attempts - 1 credential_definition_id = credential_definition_response[ @@ -707,9 +700,7 @@ async def register_did( nym_info = data else: log_msg("using ledger: " + ledger_url + "/register") - resp = await self.client_session.post( - ledger_url + "/register", json=data - ) + resp = await self.client_session.post(ledger_url + "/register", json=data) if resp.status != 200: raise Exception( f"Error registering DID {data}, response code {resp.status}" @@ -906,9 +897,7 @@ def _process(self, args, env, loop): def get_process_args(self): return list( - flatten( - ([PYTHON, "-m", "aries_cloudagent", "start"], self.get_agent_args()) - ) + flatten(([PYTHON, "-m", "aries_cloudagent", "start"], self.get_agent_args())) ) async def start_process(self, python_path: str = None, wait: bool = True): @@ -1124,9 +1113,7 @@ async def admin_GET( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] - ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] response = await self.admin_request( "GET", path, None, text, params, headers=headers ) @@ -1178,9 +1165,7 @@ async def admin_POST( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] 
- ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] response = await self.admin_request( "POST", path, data, text, params, headers=headers ) @@ -1201,9 +1186,7 @@ async def admin_PATCH( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] - ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] return await self.admin_request( "PATCH", path, data, text, params, headers=headers ) @@ -1218,9 +1201,7 @@ async def admin_PUT( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] - ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] return await self.admin_request( "PUT", path, data, text, params, headers=headers ) @@ -1240,9 +1221,7 @@ async def admin_DELETE( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] - ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] response = await self.admin_request( "DELETE", path, data, text, params, headers=headers ) @@ -1261,9 +1240,7 @@ async def admin_GET_FILE(self, path, params=None, headers=None) -> bytes: if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] - ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] params = {k: v for (k, v) in (params or {}).items() if v is not None} resp = await self.client_session.request( "GET", self.admin_url + path, params=params, headers=headers @@ -1279,9 +1256,7 @@ async def admin_PUT_FILE(self, files, url, params=None, headers=None) -> bytes: if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] - ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] params = {k: v for (k, v) in (params or {}).items() if v is not None} resp = await self.client_session.request( "PUT", url, params=params, data=files, headers=headers @@ -1653,9 +1628,7 @@ async def handle_connections(self, message): # setup endorser meta-data on our connection log_msg("Setup endorser agent meta-data ...") await self.admin_POST( - "/transactions/" - + self.endorser_connection_id - + "/set-endorser-role", + "/transactions/" + self.endorser_connection_id + "/set-endorser-role", params={"transaction_my_job": "TRANSACTION_ENDORSER"}, ) From aa096b08a61846b2a02431880f85ee26605969ed Mon Sep 17 00:00:00 2001 From: tra371 Date: Tue, 20 Feb 2024 00:11:57 +0630 Subject: [PATCH 55/69] feat: fix schemas on cred_abstract and cred_request, improve on vc_di support on alice-faber demo Signed-off-by: tra371 --- aries_cloudagent/anoncreds/issuer.py | 107 +++++++++--------- aries_cloudagent/indy/models/cred_abstract.py | 46 +++++++- aries_cloudagent/indy/models/cred_request.py | 11 +- .../v2_0/formats/vc_di/handler.py | 22 ++-- .../issue_credential/v2_0/message_types.py | 4 +- demo/runners/agent_container.py | 66 +++++------ demo/runners/faber.py | 2 +- demo/runners/support/agent.py | 4 +- 8 files changed, 145 insertions(+), 117 deletions(-) diff --git a/aries_cloudagent/anoncreds/issuer.py b/aries_cloudagent/anoncreds/issuer.py index eea459a0ca..6ddf89fcd1 100644 --- a/aries_cloudagent/anoncreds/issuer.py +++ b/aries_cloudagent/anoncreds/issuer.py @@ -551,9 +551,7 @@ async def create_credential_offer(self, credential_definition_id: 
str) -> str: CATEGORY_CRED_DEF_KEY_PROOF, credential_definition_id ) except AskarError as err: - raise AnonCredsIssuerError( - "Error retrieving credential definition" - ) from err + raise AnonCredsIssuerError("Error retrieving credential definition") from err if not cred_def or not key_proof: raise AnonCredsIssuerError( "Credential definition not found for credential offer" @@ -594,9 +592,7 @@ async def create_credential( CATEGORY_CRED_DEF_PRIVATE, cred_def_id ) except AskarError as err: - raise AnonCredsIssuerError( - "Error retrieving credential definition" - ) from err + raise AnonCredsIssuerError("Error retrieving credential definition") from err if not cred_def or not cred_def_private: raise AnonCredsIssuerError( @@ -633,60 +629,59 @@ async def create_credential( return credential.to_json() + async def create_credential_vc_di( + self, + credential_offer: dict, + credential_request: dict, + credential_values: dict, + ) -> str: + """Create Credential.""" + anoncreds_registry = self.profile.inject(AnonCredsRegistry) + schema_id = credential_offer["schema_id"] + schema_result = await anoncreds_registry.get_schema(self.profile, schema_id) + cred_def_id = credential_offer["cred_def_id"] + schema_attributes = schema_result.schema_value.attr_names -async def create_credential_vc_di( - self, - credential_offer: dict, - credential_request: dict, - credential_values: dict, -) -> str: - """Create Credential.""" - anoncreds_registry = self.profile.inject(AnonCredsRegistry) - schema_id = credential_offer["schema_id"] - schema_result = await anoncreds_registry.get_schema(self.profile, schema_id) - cred_def_id = credential_offer["cred_def_id"] - schema_attributes = schema_result.schema_value.attr_names - - try: - async with self.profile.session() as session: - cred_def = await session.handle.fetch(CATEGORY_CRED_DEF, cred_def_id) - cred_def_private = await session.handle.fetch( - CATEGORY_CRED_DEF_PRIVATE, cred_def_id - ) - except AskarError as err: - raise AnonCredsIssuerError("Error retrieving credential definition") from err - - if not cred_def or not cred_def_private: - raise AnonCredsIssuerError( - "Credential definition not found for credential issuance" - ) - - raw_values = {} - for attribute in schema_attributes: - # Ensure every attribute present in schema to be set. - # Extraneous attribute names are ignored. try: - credential_value = credential_values[attribute] - except KeyError: + async with self.profile.session() as session: + cred_def = await session.handle.fetch(CATEGORY_CRED_DEF, cred_def_id) + cred_def_private = await session.handle.fetch( + CATEGORY_CRED_DEF_PRIVATE, cred_def_id + ) + except AskarError as err: + raise AnonCredsIssuerError("Error retrieving credential definition") from err + + if not cred_def or not cred_def_private: raise AnonCredsIssuerError( - "Provided credential values are missing a value " - f"for the schema attribute '{attribute}'" + "Credential definition not found for credential issuance" ) - raw_values[attribute] = str(credential_value) + raw_values = {} + for attribute in schema_attributes: + # Ensure every attribute present in schema to be set. + # Extraneous attribute names are ignored. 
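+            # Each supplied value is coerced to a string below, since anoncreds
+            # raw credential attribute values are string-typed.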
+ try: + credential_value = credential_values[attribute] + except KeyError: + raise AnonCredsIssuerError( + "Provided credential values are missing a value " + f"for the schema attribute '{attribute}'" + ) - try: - credential = await asyncio.get_event_loop().run_in_executor( - None, - lambda: Credential.create( - cred_def.raw_value, - cred_def_private.raw_value, - credential_offer, - credential_request, - raw_values, - ), - ) - except AnoncredsError as err: - raise AnonCredsIssuerError("Error creating credential") from err + raw_values[attribute] = str(credential_value) - return credential.to_json() + try: + credential = await asyncio.get_event_loop().run_in_executor( + None, + lambda: Credential.create( + cred_def.raw_value, + cred_def_private.raw_value, + credential_offer, + credential_request, + raw_values, + ), + ) + except AnoncredsError as err: + raise AnonCredsIssuerError("Error creating credential") from err + + return credential.to_json() diff --git a/aries_cloudagent/indy/models/cred_abstract.py b/aries_cloudagent/indy/models/cred_abstract.py index c5e46f233b..999ccb8c2f 100644 --- a/aries_cloudagent/indy/models/cred_abstract.py +++ b/aries_cloudagent/indy/models/cred_abstract.py @@ -180,6 +180,12 @@ def __init__( class AnoncredsLinkSecretSchema(BaseModelSchema): """Anoncreds Link Secret Schema.""" + class Meta: + """AnoncredsLinkSecret schema metadata.""" + + model_class = AnoncredsLinkSecret + unknown = EXCLUDE + nonce = fields.Str( required=True, validate=NUM_STR_WHOLE_VALIDATE, @@ -230,6 +236,12 @@ def __init__( class DidcommSignedAttachmentSchema(BaseModelSchema): """Didcomm Signed Attachment Schema.""" + class Meta: + """Didcomm signed attachment schema metadata.""" + + model_class = DidcommSignedAttachment + unknown = EXCLUDE + algs_supported = fields.List(fields.Str(), required=True) did_methods_supported = fields.List(fields.Str(), required=True) @@ -244,9 +256,35 @@ class DidcommSignedAttachmentSchema(BaseModelSchema): ) +class BindingMethod(BaseModel): + """Binding Method Model.""" + + class Meta: + """Binding method metadata.""" + + schema_class = "BindingMethodSchema" + + def __init__( + self, + anoncreds_link_secret: Union[dict, AnoncredsLinkSecret] = None, + didcomm_signed_attachment: Union[dict, DidcommSignedAttachment] = None, + **kwargs, + ): + """Initialize values for DidcommSignedAttachment.""" + super().__init__(**kwargs) + self.anoncreds_link_secret = anoncreds_link_secret + self.didcomm_signed_attachment = didcomm_signed_attachment + + class BindingMethodSchema(BaseModelSchema): """VCDI Binding Method Schema.""" + class Meta: + """VCDI binding method schema metadata.""" + + model_class = BindingMethod + unknown = EXCLUDE + anoncreds_link_secret = fields.Nested(AnoncredsLinkSecretSchema, required=False) didcomm_signed_attachment = fields.Nested( DidcommSignedAttachmentSchema, required=True @@ -263,7 +301,7 @@ class Meta: def __init__( self, - data_model_versions_supported: str = None, + data_model_versions_supported: Sequence[str] = None, binding_required: str = None, binding_methods: str = None, credential: Union[dict, VerifiableCredential] = None, @@ -294,7 +332,7 @@ class Meta: unknown = EXCLUDE data_model_versions_supported = fields.List( - required=True, validate="", metadata={"description": "", "example": ""} + fields.Str(), required=True, metadata={"description": "", "example": ""} ) binding_required = fields.Bool( @@ -308,5 +346,7 @@ class Meta: ) credential = fields.Nested( - CredentialSchema(), required=True, metadata={"description": "", "example": 
""} + CredentialSchema(), + required=True, + metadata={"description": "", "example": ""}, ) diff --git a/aries_cloudagent/indy/models/cred_request.py b/aries_cloudagent/indy/models/cred_request.py index 636e2b0f72..e90fd9e8fc 100644 --- a/aries_cloudagent/indy/models/cred_request.py +++ b/aries_cloudagent/indy/models/cred_request.py @@ -8,6 +8,8 @@ from ...messaging.valid import ( INDY_CRED_DEF_ID_EXAMPLE, INDY_CRED_DEF_ID_VALIDATE, + INDY_DID_EXAMPLE, + INDY_DID_VALIDATE, UUID4_EXAMPLE, NUM_STR_WHOLE_EXAMPLE, NUM_STR_WHOLE_VALIDATE, @@ -88,7 +90,6 @@ class Meta: """VCDI credential request schema metadata.""" schema_class = "BindingProofSchema" - unknown = EXCLUDE def __init__( self, @@ -154,7 +155,6 @@ class Meta: """Didcomm signed attachment metadata.""" schema_class = "DidcommSignedAttachmentSchema" - unknown = EXCLUDE def __init__(self, attachment_id: str = None, **kwargs): """Initialize DidcommSignedAttachment.""" @@ -169,10 +169,9 @@ class Meta: """Didcomm Signed Attachment schema metadata.""" model_class = DidcommSignedAttachment + unknown = EXCLUDE - attachment_id = fields.str( - required=True, metadata={"description": "", "example": ""} - ) + attachment_id = fields.Str(required=True, metadata={"description": "", "example": ""}) class BindingProof(BaseModel): @@ -182,7 +181,6 @@ class Meta: """Binding proof metadata.""" schema_class = "BindingProofSchema" - unknown = EXCLUDE def __init__( self, @@ -203,6 +201,7 @@ class Meta: """Binding proof schema metadata.""" model_class = BindingProof + unknown = EXCLUDE anoncreds_link_secret = fields.Nested( AnoncredsLinkSecretSchema(), diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py index 7f37cd8541..d29095f970 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py @@ -188,7 +188,7 @@ async def _match_sent_cred_def_id(self, tag_query: Mapping[str, str]) -> str: async def create_proposal( self, cred_ex_record: V20CredExRecord, proposal_data: Mapping[str, str] ) -> Tuple[V20CredFormat, AttachDecorator]: - """Create indy credential proposal.""" + """Create vc_di credential proposal.""" if proposal_data is None: proposal_data = {} @@ -260,15 +260,15 @@ async def _create(): async def receive_offer( self, cred_ex_record: V20CredExRecord, cred_offer_message: V20CredOffer ) -> None: - """Receive indy credential offer.""" + """Receive vcdi credential offer.""" async def create_request( self, cred_ex_record: V20CredExRecord, request_data: Mapping = None ) -> CredFormatAttachment: - """Create indy credential request.""" + """Create vcdi credential request.""" if cred_ex_record.state != V20CredExRecord.STATE_OFFER_RECEIVED: raise V20CredFormatError( - "Indy issue credential format cannot start from credential request" + "vcdi issue credential format cannot start from credential request" ) await self._check_uniqueness(cred_ex_record.cred_ex_id) @@ -285,9 +285,7 @@ async def create_request( ) nonce = cred_offer["binding_method"]["anoncreds_link_secret"]["nonce"] - cred_def_id = cred_offer["binding_method"]["anoncreds_link_secret"][ - "cred_def_id" - ] + cred_def_id = cred_offer["binding_method"]["anoncreds_link_secret"]["cred_def_id"] async def _create(): anoncreds_registry = self.profile.inject(AnonCredsRegistry) @@ -332,16 +330,16 @@ async def _create(): async def receive_request( self, cred_ex_record: V20CredExRecord, 
cred_request_message: V20CredRequest ) -> None: - """Receive indy credential request.""" + """Receive vcdi credential request.""" if not cred_ex_record.cred_offer: raise V20CredFormatError( - "Indy issue credential format cannot start from credential request" + "vcdi issue credential format cannot start from credential request" ) async def issue_credential( self, cred_ex_record: V20CredExRecord, retries: int = 5 ) -> CredFormatAttachment: - """Issue indy credential.""" + """Issue vcdi credential.""" await self._check_uniqueness(cred_ex_record.cred_ex_id) attached_credential = cred_ex_record.cred_offer.attachment( @@ -402,7 +400,7 @@ async def issue_credential( async def receive_credential( self, cred_ex_record: V20CredExRecord, cred_issue_message: V20CredIssue ) -> None: - """Receive indy credential. + """Receive vcdi credential. Validation is done in the store credential step. """ @@ -410,7 +408,7 @@ async def receive_credential( async def store_credential( self, cred_ex_record: V20CredExRecord, cred_id: str = None ) -> None: - """Store indy credential.""" + """Store vcdi credential.""" cred = cred_ex_record.cred_issue.attachment(VCDICredFormatHandler.format) rev_reg_def = None diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py b/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py index b2b9e4325e..81cb20a244 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py @@ -22,7 +22,9 @@ MESSAGE_TYPES = DIDCommPrefix.qualify_all( { - CRED_20_PROPOSAL: (f"{PROTOCOL_PACKAGE}.messages.cred_proposal.V20CredProposal"), + CRED_20_PROPOSAL: ( + f"{PROTOCOL_PACKAGE}.messages.cred_proposal.V20CredProposal" + ), CRED_20_OFFER: f"{PROTOCOL_PACKAGE}.messages.cred_offer.V20CredOffer", CRED_20_REQUEST: f"{PROTOCOL_PACKAGE}.messages.cred_request.V20CredRequest", CRED_20_ISSUE: f"{PROTOCOL_PACKAGE}.messages.cred_issue.V20CredIssue", diff --git a/demo/runners/agent_container.py b/demo/runners/agent_container.py index b2db5ed138..ef3a0ac6c5 100644 --- a/demo/runners/agent_container.py +++ b/demo/runners/agent_container.py @@ -16,6 +16,7 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from runners.support.agent import ( # noqa:E402 + CRED_FORMAT_VC_DI, DemoAgent, default_genesis_txns, start_mediator_agent, @@ -214,9 +215,7 @@ async def handle_issue_credential(self, message): cred_attrs = self.cred_attrs[message["credential_definition_id"]] cred_preview = { "@type": CRED_PREVIEW_TYPE, - "attributes": [ - {"name": n, "value": v} for (n, v) in cred_attrs.items() - ], + "attributes": [{"name": n, "value": v} for (n, v) in cred_attrs.items()], } try: cred_ex_rec = await self.admin_POST( @@ -423,9 +422,7 @@ async def handle_present_proof_v2_0(self, message): pres_request_indy = ( message["by_format"].get("pres_request", {}).get("indy") ) - pres_request_dif = ( - message["by_format"].get("pres_request", {}).get("dif") - ) + pres_request_dif = message["by_format"].get("pres_request", {}).get("dif") request = {} if not pres_request_dif and not pres_request_indy: @@ -602,9 +599,7 @@ async def generate_invitation( self._connection_ready = asyncio.Future() with log_timer("Generate invitation duration:"): # Generate an invitation - log_status( - "#7 Create a connection to alice and print out the invite details" - ) + log_status("#7 Create a connection to alice and print out the invite details") invi_rec = await self.get_invite( use_did_exchange, 
auto_accept=auto_accept, @@ -816,9 +811,7 @@ async def initialize( raise Exception("Endorser agent returns None :-(") # set the endorser invite so the agent can auto-connect - self.agent.endorser_invite = ( - self.endorser_agent.endorser_multi_invitation_url - ) + self.agent.endorser_invite = self.endorser_agent.endorser_multi_invitation_url self.agent.endorser_did = self.endorser_agent.endorser_public_did else: self.endorser_agent = None @@ -854,25 +847,17 @@ async def initialize( if self.mediation: # we need to pre-connect the agent to its mediator self.agent.log("Connect wallet to mediator ...") - if not await connect_wallet_to_mediator( - self.agent, self.mediator_agent - ): + if not await connect_wallet_to_mediator(self.agent, self.mediator_agent): raise Exception("Mediation setup FAILED :-(") if self.endorser_agent: self.agent.log("Connect wallet to endorser ...") - if not await connect_wallet_to_endorser( - self.agent, self.endorser_agent - ): + if not await connect_wallet_to_endorser(self.agent, self.endorser_agent): raise Exception("Endorser setup FAILED :-(") if self.taa_accept: await self.agent.taa_accept() # if we are an author, create our public DID here ... - if ( - self.endorser_role - and self.endorser_role == "author" - and self.endorser_agent - ): + if self.endorser_role and self.endorser_role == "author" and self.endorser_agent: if self.public_did and self.cred_type != CRED_FORMAT_JSON_LD: new_did = await self.agent.admin_POST("/wallet/did/create") self.agent.did = new_did["result"]["did"] @@ -905,9 +890,7 @@ async def create_schema_and_cred_def( ): if not self.public_did: raise Exception("Can't create a schema/cred def without a public DID :-(") - if self.cred_type in [ - CRED_FORMAT_INDY, - ]: + if self.cred_type in [CRED_FORMAT_INDY, CRED_FORMAT_VC_DI]: # need to redister schema and cred def on the ledger self.cred_def_id = await self.agent.create_schema_and_cred_def( schema_name, @@ -974,6 +957,25 @@ async def issue_credential( return cred_exchange + elif self.cred_type == CRED_FORMAT_VC_DI: + cred_preview = { + "@type": CRED_PREVIEW_TYPE, + "attributes": cred_attrs, + } + offer_request = { + "connection_id": self.agent.connection_id, + "comment": f"Offer on cred def id {cred_def_id}", + "auto_remove": False, + "credential_preview": cred_preview, + "filter": {"vc_di": {"cred_def_id": cred_def_id}}, + "trace": self.exchange_tracing, + } + cred_exchange = await self.agent.admin_POST( + "/issue-credential-2.0/send-offer", offer_request + ) + + return cred_exchange + elif self.cred_type == CRED_FORMAT_JSON_LD: # TODO create and send the json-ld credential offer pass @@ -1015,9 +1017,7 @@ async def receive_credential( async def request_proof(self, proof_request, explicit_revoc_required: bool = False): log_status("#20 Request proof of degree from alice") - if self.cred_type in [ - CRED_FORMAT_INDY, - ]: + if self.cred_type in [CRED_FORMAT_INDY, CRED_FORMAT_VC_DI]: indy_proof_request = { "name": ( proof_request["name"] @@ -1100,9 +1100,7 @@ async def verify_proof(self, proof_request): # log_status(f">>> last proof received: {self.agent.last_proof_received}") - if self.cred_type in [ - CRED_FORMAT_INDY, - ]: + if self.cred_type in [CRED_FORMAT_INDY, CRED_FORMAT_VC_DI]: # return verified status return self.agent.last_proof_received["verified"] @@ -1304,9 +1302,7 @@ def arg_parser(ident: str = None, port: int = 8020): metavar=(""), help="API level (10 or 20 (default))", ) - parser.add_argument( - "--timing", action="store_true", help="Enable timing information" - ) + 
parser.add_argument("--timing", action="store_true", help="Enable timing information") parser.add_argument( "--multitenant", action="store_true", help="Enable multitenancy options" ) diff --git a/demo/runners/faber.py b/demo/runners/faber.py index be5a5b0b3c..2de31cb06d 100644 --- a/demo/runners/faber.py +++ b/demo/runners/faber.py @@ -535,7 +535,7 @@ async def main(args): else False ), ) - elif faber_agent.cred_type == (CRED_FORMAT_JSON_LD or CRED_FORMAT_VC_DI): + elif faber_agent.cred_type in [CRED_FORMAT_JSON_LD, CRED_FORMAT_VC_DI]: faber_agent.public_did = True await faber_agent.initialize(the_agent=agent) else: diff --git a/demo/runners/support/agent.py b/demo/runners/support/agent.py index 0dc2a2f1d2..1ba14c6554 100644 --- a/demo/runners/support/agent.py +++ b/demo/runners/support/agent.py @@ -668,9 +668,7 @@ async def register_did( role: str = "TRUST_ANCHOR", cred_type: str = CRED_FORMAT_INDY, ): - if cred_type in [ - CRED_FORMAT_INDY, - ]: + if cred_type in [CRED_FORMAT_INDY, CRED_FORMAT_VC_DI]: # if registering a did for issuing indy credentials, publish the did on the ledger self.log(f"Registering {self.ident} ...") if not ledger_url: From 43e43bcdf25d668cd88db57e49bc022c4a584e6a Mon Sep 17 00:00:00 2001 From: tra371 Date: Tue, 20 Feb 2024 00:14:52 +0630 Subject: [PATCH 56/69] fix: reformat code with black Signed-off-by: tra371 --- aries_cloudagent/anoncreds/issuer.py | 12 +++- aries_cloudagent/indy/models/cred_request.py | 4 +- .../v2_0/formats/vc_di/handler.py | 4 +- demo/runners/agent_container.py | 34 ++++++++--- demo/runners/support/agent.py | 56 ++++++++++++++----- 5 files changed, 83 insertions(+), 27 deletions(-) diff --git a/aries_cloudagent/anoncreds/issuer.py b/aries_cloudagent/anoncreds/issuer.py index 6ddf89fcd1..2dfb2dcb33 100644 --- a/aries_cloudagent/anoncreds/issuer.py +++ b/aries_cloudagent/anoncreds/issuer.py @@ -551,7 +551,9 @@ async def create_credential_offer(self, credential_definition_id: str) -> str: CATEGORY_CRED_DEF_KEY_PROOF, credential_definition_id ) except AskarError as err: - raise AnonCredsIssuerError("Error retrieving credential definition") from err + raise AnonCredsIssuerError( + "Error retrieving credential definition" + ) from err if not cred_def or not key_proof: raise AnonCredsIssuerError( "Credential definition not found for credential offer" @@ -592,7 +594,9 @@ async def create_credential( CATEGORY_CRED_DEF_PRIVATE, cred_def_id ) except AskarError as err: - raise AnonCredsIssuerError("Error retrieving credential definition") from err + raise AnonCredsIssuerError( + "Error retrieving credential definition" + ) from err if not cred_def or not cred_def_private: raise AnonCredsIssuerError( @@ -649,7 +653,9 @@ async def create_credential_vc_di( CATEGORY_CRED_DEF_PRIVATE, cred_def_id ) except AskarError as err: - raise AnonCredsIssuerError("Error retrieving credential definition") from err + raise AnonCredsIssuerError( + "Error retrieving credential definition" + ) from err if not cred_def or not cred_def_private: raise AnonCredsIssuerError( diff --git a/aries_cloudagent/indy/models/cred_request.py b/aries_cloudagent/indy/models/cred_request.py index e90fd9e8fc..4438a99259 100644 --- a/aries_cloudagent/indy/models/cred_request.py +++ b/aries_cloudagent/indy/models/cred_request.py @@ -171,7 +171,9 @@ class Meta: model_class = DidcommSignedAttachment unknown = EXCLUDE - attachment_id = fields.Str(required=True, metadata={"description": "", "example": ""}) + attachment_id = fields.Str( + required=True, metadata={"description": "", "example": ""} + 
) class BindingProof(BaseModel): diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py index d29095f970..50a8e40979 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py @@ -285,7 +285,9 @@ async def create_request( ) nonce = cred_offer["binding_method"]["anoncreds_link_secret"]["nonce"] - cred_def_id = cred_offer["binding_method"]["anoncreds_link_secret"]["cred_def_id"] + cred_def_id = cred_offer["binding_method"]["anoncreds_link_secret"][ + "cred_def_id" + ] async def _create(): anoncreds_registry = self.profile.inject(AnonCredsRegistry) diff --git a/demo/runners/agent_container.py b/demo/runners/agent_container.py index ef3a0ac6c5..d7f4f3c09d 100644 --- a/demo/runners/agent_container.py +++ b/demo/runners/agent_container.py @@ -215,7 +215,9 @@ async def handle_issue_credential(self, message): cred_attrs = self.cred_attrs[message["credential_definition_id"]] cred_preview = { "@type": CRED_PREVIEW_TYPE, - "attributes": [{"name": n, "value": v} for (n, v) in cred_attrs.items()], + "attributes": [ + {"name": n, "value": v} for (n, v) in cred_attrs.items() + ], } try: cred_ex_rec = await self.admin_POST( @@ -422,7 +424,9 @@ async def handle_present_proof_v2_0(self, message): pres_request_indy = ( message["by_format"].get("pres_request", {}).get("indy") ) - pres_request_dif = message["by_format"].get("pres_request", {}).get("dif") + pres_request_dif = ( + message["by_format"].get("pres_request", {}).get("dif") + ) request = {} if not pres_request_dif and not pres_request_indy: @@ -599,7 +603,9 @@ async def generate_invitation( self._connection_ready = asyncio.Future() with log_timer("Generate invitation duration:"): # Generate an invitation - log_status("#7 Create a connection to alice and print out the invite details") + log_status( + "#7 Create a connection to alice and print out the invite details" + ) invi_rec = await self.get_invite( use_did_exchange, auto_accept=auto_accept, @@ -811,7 +817,9 @@ async def initialize( raise Exception("Endorser agent returns None :-(") # set the endorser invite so the agent can auto-connect - self.agent.endorser_invite = self.endorser_agent.endorser_multi_invitation_url + self.agent.endorser_invite = ( + self.endorser_agent.endorser_multi_invitation_url + ) self.agent.endorser_did = self.endorser_agent.endorser_public_did else: self.endorser_agent = None @@ -847,17 +855,25 @@ async def initialize( if self.mediation: # we need to pre-connect the agent to its mediator self.agent.log("Connect wallet to mediator ...") - if not await connect_wallet_to_mediator(self.agent, self.mediator_agent): + if not await connect_wallet_to_mediator( + self.agent, self.mediator_agent + ): raise Exception("Mediation setup FAILED :-(") if self.endorser_agent: self.agent.log("Connect wallet to endorser ...") - if not await connect_wallet_to_endorser(self.agent, self.endorser_agent): + if not await connect_wallet_to_endorser( + self.agent, self.endorser_agent + ): raise Exception("Endorser setup FAILED :-(") if self.taa_accept: await self.agent.taa_accept() # if we are an author, create our public DID here ... 
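            # Author + endorser path: the block below creates a fresh local DID
            # (POST /wallet/did/create) so it can be written to the ledger via the
            # endorser and then assigned as the public DID; register_or_switch_wallet
            # in demo/runners/support/agent.py follows the same
            # create/register/assign sequence.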
- if self.endorser_role and self.endorser_role == "author" and self.endorser_agent: + if ( + self.endorser_role + and self.endorser_role == "author" + and self.endorser_agent + ): if self.public_did and self.cred_type != CRED_FORMAT_JSON_LD: new_did = await self.agent.admin_POST("/wallet/did/create") self.agent.did = new_did["result"]["did"] @@ -1302,7 +1318,9 @@ def arg_parser(ident: str = None, port: int = 8020): metavar=(""), help="API level (10 or 20 (default))", ) - parser.add_argument("--timing", action="store_true", help="Enable timing information") + parser.add_argument( + "--timing", action="store_true", help="Enable timing information" + ) parser.add_argument( "--multitenant", action="store_true", help="Enable multitenancy options" ) diff --git a/demo/runners/support/agent.py b/demo/runners/support/agent.py index 1ba14c6554..35436478d3 100644 --- a/demo/runners/support/agent.py +++ b/demo/runners/support/agent.py @@ -370,7 +370,9 @@ async def register_schema_and_creddef_indy( log_msg("Schema ID:", schema_id) # Create a cred def for the schema - cred_def_tag = tag if tag else (self.ident + "." + schema_name).replace(" ", "_") + cred_def_tag = ( + tag if tag else (self.ident + "." + schema_name).replace(" ", "_") + ) credential_definition_body = { "schema_id": schema_id, "support_revocation": support_revocation, @@ -400,7 +402,9 @@ async def register_schema_and_creddef_indy( credential_definition_response = await self.admin_GET( "/credential-definitions/created" ) - if 0 == len(credential_definition_response["credential_definition_ids"]): + if 0 == len( + credential_definition_response["credential_definition_ids"] + ): await asyncio.sleep(1.0) attempts = attempts - 1 credential_definition_id = credential_definition_response[ @@ -447,7 +451,9 @@ async def register_schema_and_creddef_anoncreds( log_msg("Schema ID:", schema_id) # Create a cred def for the schema - cred_def_tag = tag if tag else (self.ident + "." + schema_name).replace(" ", "_") + cred_def_tag = ( + tag if tag else (self.ident + "." 
+ schema_name).replace(" ", "_") + ) max_cred_num = revocation_registry_size if revocation_registry_size else 0 credential_definition_body = { "credential_definition": { @@ -483,7 +489,9 @@ async def register_schema_and_creddef_anoncreds( credential_definition_response = await self.admin_GET( "/anoncreds/credential-definitions" ) - if 0 == len(credential_definition_response["credential_definition_ids"]): + if 0 == len( + credential_definition_response["credential_definition_ids"] + ): await asyncio.sleep(1.0) attempts = attempts - 1 credential_definition_id = credential_definition_response[ @@ -698,7 +706,9 @@ async def register_did( nym_info = data else: log_msg("using ledger: " + ledger_url + "/register") - resp = await self.client_session.post(ledger_url + "/register", json=data) + resp = await self.client_session.post( + ledger_url + "/register", json=data + ) if resp.status != 200: raise Exception( f"Error registering DID {data}, response code {resp.status}" @@ -895,7 +905,9 @@ def _process(self, args, env, loop): def get_process_args(self): return list( - flatten(([PYTHON, "-m", "aries_cloudagent", "start"], self.get_agent_args())) + flatten( + ([PYTHON, "-m", "aries_cloudagent", "start"], self.get_agent_args()) + ) ) async def start_process(self, python_path: str = None, wait: bool = True): @@ -1111,7 +1123,9 @@ async def admin_GET( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) response = await self.admin_request( "GET", path, None, text, params, headers=headers ) @@ -1163,7 +1177,9 @@ async def admin_POST( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) response = await self.admin_request( "POST", path, data, text, params, headers=headers ) @@ -1184,7 +1200,9 @@ async def admin_PATCH( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) return await self.admin_request( "PATCH", path, data, text, params, headers=headers ) @@ -1199,7 +1217,9 @@ async def admin_PUT( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) return await self.admin_request( "PUT", path, data, text, params, headers=headers ) @@ -1219,7 +1239,9 @@ async def admin_DELETE( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) response = await self.admin_request( "DELETE", path, data, text, params, headers=headers ) @@ -1238,7 +1260,9 @@ async def admin_GET_FILE(self, path, params=None, headers=None) -> bytes: if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) params = {k: v for (k, v) in (params or {}).items() if v is not None} resp = await self.client_session.request( "GET", self.admin_url + path, params=params, headers=headers @@ -1254,7 +1278,9 @@ async def 
admin_PUT_FILE(self, files, url, params=None, headers=None) -> bytes: if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) params = {k: v for (k, v) in (params or {}).items() if v is not None} resp = await self.client_session.request( "PUT", url, params=params, data=files, headers=headers @@ -1626,7 +1652,9 @@ async def handle_connections(self, message): # setup endorser meta-data on our connection log_msg("Setup endorser agent meta-data ...") await self.admin_POST( - "/transactions/" + self.endorser_connection_id + "/set-endorser-role", + "/transactions/" + + self.endorser_connection_id + + "/set-endorser-role", params={"transaction_my_job": "TRANSACTION_ENDORSER"}, ) From dc776d577a6c82d22f15c31f65304a0044715ebf Mon Sep 17 00:00:00 2001 From: tra371 Date: Wed, 28 Feb 2024 22:58:01 +0630 Subject: [PATCH 57/69] feat: add cred-type vc_di to demo runners Signed-off-by: tra371 --- demo/runners/performance.py | 11 +++++++++++ demo/runners/support/agent.py | 22 ++++++++++++++++++++-- 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/demo/runners/performance.py b/demo/runners/performance.py index 4a0794caa8..194284b910 100644 --- a/demo/runners/performance.py +++ b/demo/runners/performance.py @@ -179,6 +179,7 @@ def __init__( log_file: str = None, log_config: str = None, log_level: str = None, + cred_type: str = None, **kwargs, ): super().__init__( @@ -188,6 +189,7 @@ def __init__( log_file=log_file, log_config=log_config, log_level=log_level, + cred_type=cred_type, **kwargs, ) self.extra_args = [ @@ -330,6 +332,7 @@ async def main( log_file: str = None, log_config: str = None, log_level: str = None, + cred_type: str = None, ): if multi_ledger: genesis = None @@ -377,6 +380,7 @@ async def main( log_file=log_file, log_config=log_config, log_level=log_level, + cred_type=cred_type, ) await faber.listen_webhooks(start_port + 5) await faber.register_did() @@ -760,6 +764,13 @@ async def check_received_pings(agent, issue_count, pb): "('debug', 'info', 'warning', 'error', 'critical')" ), ) + parser.add_argument( + "--cred-type", + type=str, + metavar="", + default=None, + help=("Specifyng the credential type"), + ) args = parser.parse_args() if args.did_exchange and args.mediation: diff --git a/demo/runners/support/agent.py b/demo/runners/support/agent.py index 35436478d3..d800341221 100644 --- a/demo/runners/support/agent.py +++ b/demo/runners/support/agent.py @@ -674,7 +674,7 @@ async def register_did( did: str = None, verkey: str = None, role: str = "TRUST_ANCHOR", - cred_type: str = CRED_FORMAT_INDY, + cred_type: str = CRED_FORMAT_INDY or CRED_FORMAT_VC_DI, ): if cred_type in [CRED_FORMAT_INDY, CRED_FORMAT_VC_DI]: # if registering a did for issuing indy credentials, publish the did on the ledger @@ -732,7 +732,7 @@ async def register_or_switch_wallet( public_did=False, webhook_port: int = None, mediator_agent=None, - cred_type: str = CRED_FORMAT_INDY, + cred_type: str = CRED_FORMAT_INDY or CRED_FORMAT_VC_DI, endorser_agent=None, taa_accept=False, ): @@ -815,6 +815,23 @@ async def register_or_switch_wallet( await self.register_did( did=new_did["result"]["did"], verkey=new_did["result"]["verkey"], + cred_type=CRED_FORMAT_INDY, + ) + if self.endorser_role and self.endorser_role == "author": + if endorser_agent: + await self.admin_POST("/wallet/did/public?did=" + self.did) + await asyncio.sleep(3.0) + else: + await 
self.admin_POST("/wallet/did/public?did=" + self.did) + await asyncio.sleep(3.0) + elif cred_type == CRED_FORMAT_VC_DI: + # assign public did + new_did = await self.admin_POST("/wallet/did/create") + self.did = new_did["result"]["did"] + await self.register_did( + did=new_did["result"]["did"], + verkey=new_did["result"]["verkey"], + cred_type=CRED_FORMAT_VC_DI, ) if self.endorser_role and self.endorser_role == "author": if endorser_agent: @@ -1678,6 +1695,7 @@ async def start_endorser_agent( genesis_data=genesis, genesis_txn_list=genesis_txn_list, ) + # await endorser_agent.register_did(cred_type=CRED_FORMAT_INDY) await endorser_agent.register_did(cred_type=CRED_FORMAT_INDY) await endorser_agent.listen_webhooks(start_port + 2) await endorser_agent.start_process() From 559231a313768ea4e1c08e92920d2a1446c5cf32 Mon Sep 17 00:00:00 2001 From: tra371 Date: Tue, 5 Mar 2024 00:15:39 +0630 Subject: [PATCH 58/69] feat: add vc_di to V20CredFilter Signed-off-by: tra371 --- .../protocols/issue_credential/v2_0/routes.py | 58 ++++++++++++++++++- 1 file changed, 55 insertions(+), 3 deletions(-) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/routes.py b/aries_cloudagent/protocols/issue_credential/v2_0/routes.py index 1341ae4dc2..7188df85ce 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/routes.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/routes.py @@ -12,6 +12,9 @@ request_schema, response_schema, ) +from aries_cloudagent.protocols.issue_credential.v2_0.models.detail.vc_di import ( + V20CredExRecordVCDISchema, +) from marshmallow import ValidationError, fields, validate, validates_schema from ....admin.request_context import AdminRequestContext @@ -108,6 +111,7 @@ class V20CredExRecordDetailSchema(OpenAPISchema): indy = fields.Nested(V20CredExRecordIndySchema, required=False) ld_proof = fields.Nested(V20CredExRecordLDProofSchema, required=False) + vc_di = fields.Nested(V20CredExRecordVCDISchema, required=False) class V20CredExRecordListResultSchema(OpenAPISchema): @@ -169,6 +173,46 @@ class V20CredFilterIndySchema(OpenAPISchema): ) +class V20CredFilterVCDISchema(OpenAPISchema): + """VCDI credential filtration criteria.""" + + cred_def_id = fields.Str( + required=False, + validate=INDY_CRED_DEF_ID_VALIDATE, + metadata={ + "description": "Credential definition identifier", + "example": INDY_CRED_DEF_ID_EXAMPLE, + }, + ) + schema_id = fields.Str( + required=False, + validate=INDY_SCHEMA_ID_VALIDATE, + metadata={ + "description": "Schema identifier", + "example": INDY_SCHEMA_ID_EXAMPLE, + }, + ) + schema_issuer_did = fields.Str( + required=False, + validate=INDY_DID_VALIDATE, + metadata={"description": "Schema issuer DID", "example": INDY_DID_EXAMPLE}, + ) + schema_name = fields.Str( + required=False, + metadata={"description": "Schema name", "example": "preferences"}, + ) + schema_version = fields.Str( + required=False, + validate=INDY_VERSION_VALIDATE, + metadata={"description": "Schema version", "example": INDY_VERSION_EXAMPLE}, + ) + issuer_did = fields.Str( + required=False, + validate=INDY_DID_VALIDATE, + metadata={"description": "Credential issuer DID", "example": INDY_DID_EXAMPLE}, + ) + + class V20CredFilterSchema(OpenAPISchema): """Credential filtration criteria.""" @@ -183,6 +227,12 @@ class V20CredFilterSchema(OpenAPISchema): metadata={"description": "Credential filter for linked data proof"}, ) + vc_di = fields.Nested( + V20CredFilterVCDISchema, + required=False, + metadata={"description": "Credential filter for vc_di"}, + ) + @validates_schema def 
validate_fields(self, data, **kwargs): """Validate schema fields. @@ -198,7 +248,7 @@ def validate_fields(self, data, **kwargs): """ if not any(f.api in data for f in V20CredFormat.Format): raise ValidationError( - "V20CredFilterSchema requires indy, ld_proof, or both" + "V20CredFilterSchema requires indy, ld_proof, vc_di or all" ) @@ -241,9 +291,11 @@ class V20IssueCredSchemaCore(AdminAPIMessageTracingSchema): def validate(self, data, **kwargs): """Make sure preview is present when indy format is present.""" - if data.get("filter", {}).get("indy") and not data.get("credential_preview"): + if ( + data.get("filter", {}).get("indy") or data.get("filter", {}).get("vc_di") + ) and not data.get("credential_preview"): raise ValidationError( - "Credential preview is required if indy filter is present" + "Credential preview is required if indy or vc_di filter is present" ) From 3977fe595d7089004f3f178c6905b291fda8bc91 Mon Sep 17 00:00:00 2001 From: tra371 Date: Tue, 5 Mar 2024 00:51:59 +0630 Subject: [PATCH 59/69] fix: modify agent container and faber agent Signed-off-by: tra371 --- demo/runners/agent_container.py | 2 ++ demo/runners/faber.py | 6 ++++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/demo/runners/agent_container.py b/demo/runners/agent_container.py index d7f4f3c09d..dbdb7a772e 100644 --- a/demo/runners/agent_container.py +++ b/demo/runners/agent_container.py @@ -1472,11 +1472,13 @@ async def create_agent_with_args(args, ident: str = None, extra_args: list = Non if "cred_type" in args and args.cred_type not in [ CRED_FORMAT_INDY, + CRED_FORMAT_VC_DI, ]: public_did = None aip = 20 elif "cred_type" in args and args.cred_type in [ CRED_FORMAT_INDY, + CRED_FORMAT_VC_DI, ]: public_did = True else: diff --git a/demo/runners/faber.py b/demo/runners/faber.py index 2de31cb06d..bb9d9e9a45 100644 --- a/demo/runners/faber.py +++ b/demo/runners/faber.py @@ -523,7 +523,7 @@ async def main(args): "birthdate_dateint", "timestamp", ] - if faber_agent.cred_type == CRED_FORMAT_INDY: + if faber_agent.cred_type in [CRED_FORMAT_INDY, CRED_FORMAT_VC_DI]: faber_agent.public_did = True await faber_agent.initialize( the_agent=agent, @@ -535,7 +535,9 @@ async def main(args): else False ), ) - elif faber_agent.cred_type in [CRED_FORMAT_JSON_LD, CRED_FORMAT_VC_DI]: + elif faber_agent.cred_type in [ + CRED_FORMAT_JSON_LD, + ]: faber_agent.public_did = True await faber_agent.initialize(the_agent=agent) else: From b9f97768b510380e10a31ff77c3bcdf10c63a301 Mon Sep 17 00:00:00 2001 From: Golda Velez Date: Tue, 5 Mar 2024 08:24:22 -0700 Subject: [PATCH 60/69] add type --- demo/runners/agent_container.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/demo/runners/agent_container.py b/demo/runners/agent_container.py index d7f4f3c09d..d0803dd8d9 100644 --- a/demo/runners/agent_container.py +++ b/demo/runners/agent_container.py @@ -697,7 +697,7 @@ def __init__( genesis_txns: str = None, genesis_txn_list: str = None, tails_server_base_url: str = None, - cred_type: str = CRED_FORMAT_INDY, + cred_type: str = CRED_FORMAT_INDY or CRED_FORMAT_VC_DI, show_timing: bool = False, multitenant: bool = False, mediation: bool = False, From be2765563d683439b9bafe52f87d4b251aec7d23 Mon Sep 17 00:00:00 2001 From: Stephen Curran Date: Tue, 5 Mar 2024 15:05:34 -0800 Subject: [PATCH 61/69] 0.12.0rc2 Signed-off-by: Stephen Curran --- .github/workflows/publish-docs.yml | 2 + CHANGELOG.md | 81 ++++++++++++++--------- docs/demo/AliceWantsAJsonCredential.md | 2 +- docs/demo/AriesOpenAPIDemo.md | 4 +- 
docs/features/DIDResolution.md | 2 +- docs/features/Multitenancy.md | 4 +- docs/features/SupportedRFCs.md | 2 +- docs/features/devcontainer.md | 2 +- docs/generated/aries_cloudagent.utils.rst | 8 +++ mkdocs.yml | 4 +- open-api/openapi.json | 4 +- open-api/swagger.json | 4 +- pyproject.toml | 2 +- 13 files changed, 73 insertions(+), 48 deletions(-) diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index 4b36c5ba22..3681684604 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -39,6 +39,8 @@ jobs: VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,' -e 's/^docs-v//') # Copy all of the root level md files into the docs folder for deployment, tweaking the relative paths for i in *.md; do sed -e "s#docs/#./#g" $i >docs/$i; done + # Fix references in DevReadMe.md to moved files + sed -e "s#\.\./\.\./#../#" docs/features/DevReadMe.md >tmp.md; mv tmp.md docs/features/DevReadMe.md # Populate overrides for the current version, and then remove to not apply if VERSION is main branch OVERRIDE=overrides/main.html echo -e "{% extends \"base.html\" %}\n\n{% block outdated %}\n You are viewing the documentation for ACA-Py Release $ALIAS.\n{% endblock %}" >$OVERRIDE diff --git a/CHANGELOG.md b/CHANGELOG.md index 45c584ede3..89623d92ca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,39 +1,42 @@ # Aries Cloud Agent Python Changelog -## 0.12.0rc1 +## 0.12.0rc2 -### February 17, 2024 +### March 5, 2024 Release 0.12.0 is a relative large release but currently with no breaking changes. We expect there will be breaking changes (at least in the handling of endorsement) before the 0.12.0 release is finalized, hence the minor version update. -The first `rc0` release candidate `rc0` introduced a regression via [PR \#2705] that has been reverted in [PR \#2789]. Further investigation is needed to determine how to accomplish the goal of [PR \#2705] ("feat: inject profile") without the regression. +The `rc0` release candidate introduced a regression via [PR \#2705] that has been reverted in `rc1` and later via [PR \#2789]. Further investigation is needed to determine how to accomplish the goal of [PR \#2705] ("feat: inject profile") without the regression. The `rc2` +and later releases address a regression related to the sending of a revocation notification from the issuer to the holder of a newly +revoked credential, fixed in [PR \#2814] +[PR \#2814]: https://github.com/hyperledger/aries-cloudagent-python/pull/2705 [PR \#2705]: https://github.com/hyperledger/aries-cloudagent-python/pull/2705 [PR \#2789]: https://github.com/hyperledger/aries-cloudagent-python/pull/2789 -Much progress was made on `did:peer` support in this release, with the handling of inbound [DID Peer] 1 added, and inbound and outbound support for DID Peer 2 and 4. The goal of that work is to eliminate the remaining places where "unqualified" DIDs remain. Work continues in supporting ledger agnostic [AnonCreds], and the new [Hyperledger AnonCreds Rust] library. Attention was also given in the release to the handling of JSON-LD [Data Integrity Verifiable Credentials], with more expected before the release is finalized. In addition to those updates, there were fixes and improvements across the codebase. +Much progress has been made on `did:peer` support in this release, with the handling of inbound [DID Peer] 1 added, and inbound and outbound support for DID Peer 2 and 4. 
The goal of that work is to eliminate the remaining places where "unqualified" DIDs remain, and to enable the "connection reuse" in the Out of Band protocol when using DID Peer 2 and 4 DIDs. Work continues in supporting ledger agnostic [AnonCreds], and the new [Hyperledger AnonCreds Rust] library. Attention was also given in the release to the handling of JSON-LD [Data Integrity Verifiable Credentials], with more expected before the release is finalized. In addition to those updates, there were fixes and improvements across the codebase. The most visible change in this release is the re-organization of the ACA-Py documentation, moving the vast majority of the documents to the folders within the `docs` folder -- a long overdue change that will allow us to soon publish the documents on [https://aca-py.org](https://aca-py.org) directly from the ACA-Py repository, rather than from the separate [aries-acapy-docs](https://github.com/hyperledger/aries-acapy-docs) currently being used. -A big developer improvement is a revampling of the test handling to eliminate ~2500 warnings that were previously generated in the test suite. Nice job [@ff137](https://github.com/ff137)! +A big developer improvement is a revamping of the test handling to eliminate ~2500 warnings that were previously generated in the test suite. Nice job [@ff137](https://github.com/ff137)! [DID Peer]: https://identity.foundation/peer-did-method-spec/ [AnonCreds]: https://www.hyperledger.org/projects/anoncreds [Hyperledger AnonCreds Rust]: https://github.com/hyperledger/anoncreds-rs [Data Integrity Verifiable Credentials]: https://www.w3.org/TR/vc-data-integrity/ -### 0.12.0rc1 Breaking Changes +### 0.12.0rc2 Breaking Changes -There are no breaking changes in 0.12.0rc1. +There are no breaking changes in 0.12.0rc2. - + -#### 0.12.0rc1 Categorized List of Pull Requests +#### 0.12.0rc2 Categorized List of Pull Requests - DID Handling and Connection Establishment Updates/Fixes + - Author subwallet setup automation [\#2791](https://github.com/hyperledger/aries-cloudagent-python/pull/2791) [jamshale](https://github.com/jamshale) - fix: save multi_use to the DB for OOB invitations [\#2694](https://github.com/hyperledger/aries-cloudagent-python/pull/2694) [frostyfrog](https://github.com/frostyfrog) - Connection and DIDX Problem Reports [\#2653](https://github.com/hyperledger/aries-cloudagent-python/pull/2653) [usingtechnology](https://github.com/usingtechnology) - - Feat: DIDX Implicit Request auto-accept and Delete OOB Invitation related records [\#2642](https://github.com/hyperledger/aries-cloudagent-python/pull/2642) [shaangill025](https://github.com/shaangill025) - DID Peer and DID Resolver Updates and Fixes - Integration test for did:peer [\#2713](https://github.com/hyperledger/aries-cloudagent-python/pull/2713) [ianco](https://github.com/ianco) @@ -45,6 +48,9 @@ There are no breaking changes in 0.12.0rc1. 
- feat: support resolving did:peer:1 received in did exchange [\#2611](https://github.com/hyperledger/aries-cloudagent-python/pull/2611) [dbluhm](https://github.com/dbluhm) - Ledger Agnostic AnonCreds RS Changes + - Send revocation list instead of rev_list object - Anoncreds [\#2821](https://github.com/hyperledger/aries-cloudagent-python/pull/2821) [jamshale](https://github.com/jamshale) + - Fix anoncreds non-endorsement revocation [\#2814](https://github.com/hyperledger/aries-cloudagent-python/pull/2814) [jamshale](https://github.com/jamshale) - Get and create anoncreds profile when using anoncreds subwallet [\#2803](https://github.com/hyperledger/aries-cloudagent-python/pull/2803) [jamshale](https://github.com/jamshale) + - Add anoncreds multitenant endorsement integration tests [\#2801](https://github.com/hyperledger/aries-cloudagent-python/pull/2801) [jamshale](https://github.com/jamshale) - Anoncreds revoke and publish-revocations endorsement [\#2782](https://github.com/hyperledger/aries-cloudagent-python/pull/2782) [jamshale](https://github.com/jamshale) - Upgrade anoncreds to version 0.2.0-dev11 [\#2763](https://github.com/hyperledger/aries-cloudagent-python/pull/2763) [jamshale](https://github.com/jamshale) - Update anoncreds to 0.2.0-dev10 [\#2758](https://github.com/hyperledger/aries-cloudagent-python/pull/2758) [jamshale](https://github.com/jamshale) @@ -79,6 +85,7 @@ There are no breaking changes in 0.12.0rc1. - refactor: make ldp_vc logic reusable [\#2533](https://github.com/hyperledger/aries-cloudagent-python/pull/2533) [dbluhm](https://github.com/dbluhm) - Credential Exchange (Issue, Present) Updates + - Allow for crids in event payload to be integers [\#2819](https://github.com/hyperledger/aries-cloudagent-python/pull/2819) [jamshale](https://github.com/jamshale) - Create revocation notification after list entry written to ledger [\#2812](https://github.com/hyperledger/aries-cloudagent-python/pull/2812) [jamshale](https://github.com/jamshale) - Remove exception on connectionless presentation problem report handler [\#2723](https://github.com/hyperledger/aries-cloudagent-python/pull/2723) [loneil](https://github.com/loneil) - Ensure "preserve_exchange_records" flags are set. [\#2664](https://github.com/hyperledger/aries-cloudagent-python/pull/2664) [usingtechnology](https://github.com/usingtechnology) - Slight improvement to credx proof validation error message [\#2655](https://github.com/hyperledger/aries-cloudagent-python/pull/2655) [ianco](https://github.com/ianco) @@ -89,6 +96,12 @@ There are no breaking changes in 0.12.0rc1. 
- Improve Per Tenant Logging: Fix issues around default log file path [\#2659](https://github.com/hyperledger/aries-cloudagent-python/pull/2659) [shaangill025](https://github.com/shaangill025) - Other Fixes, Demo, DevContainer and Documentation Fixes + - FIX: GHA update for doc publishing, fix doc file that was blanked [\#2820](https://github.com/hyperledger/aries-cloudagent-python/pull/2820) [swcurran](https://github.com/swcurran) + - More updates to get docs publishing [\#2810](https://github.com/hyperledger/aries-cloudagent-python/pull/2810) [swcurran](https://github.com/swcurran) + - Eliminate the double workflow event [\#2811](https://github.com/hyperledger/aries-cloudagent-python/pull/2811) [swcurran](https://github.com/swcurran) + - Publish docs GHActions tweak [\#2806](https://github.com/hyperledger/aries-cloudagent-python/pull/2806) [swcurran](https://github.com/swcurran) + - Update publish-docs to operate on main and on branches prefixed with docs-v [\#2804](https://github.com/hyperledger/aries-cloudagent-python/pull/2804) [swcurran](https://github.com/swcurran) + - Add index.html redirector to gh-pages branch [\#2802](https://github.com/hyperledger/aries-cloudagent-python/pull/2802) [swcurran](https://github.com/swcurran) - Demo description of reuse in establishing a connection [\#2787](https://github.com/hyperledger/aries-cloudagent-python/pull/2787) [swcurran](https://github.com/swcurran) - Reorganize the ACA-Py Documentation Files [\#2765](https://github.com/hyperledger/aries-cloudagent-python/pull/2765) [swcurran](https://github.com/swcurran) - Tweaks to MD files to enable aca-py.org publishing [\#2771](https://github.com/hyperledger/aries-cloudagent-python/pull/2771) [swcurran](https://github.com/swcurran) @@ -102,27 +115,29 @@ There are no breaking changes in 0.12.0rc1. 
- Update the ReadTheDocs config in case we do another 0.10.x release [\#2629](https://github.com/hyperledger/aries-cloudagent-python/pull/2629) [swcurran](https://github.com/swcurran) - Dependencies and Internal Updates + - bot chore(deps): Bump cryptography from 42.0.3 to 42.0.4 dependencies python [\#2805](https://github.com/hyperledger/aries-cloudagent-python/pull/2805) [dependabot](https://github.com/dependabot) + - bot chore(deps): Bump the all-actions group with 3 updates dependencies github_actions [\#2815](https://github.com/hyperledger/aries-cloudagent-python/pull/2815) [dependabot](https://github.com/dependabot) - Change middleware registration order [\#2796](https://github.com/hyperledger/aries-cloudagent-python/pull/2796) [PatStLouis](https://github.com/PatStLouis) - Bump pyld version to 2.0.4 [\#2795](https://github.com/hyperledger/aries-cloudagent-python/pull/2795) [PatStLouis](https://github.com/PatStLouis) - Revert profile inject [\#2789](https://github.com/hyperledger/aries-cloudagent-python/pull/2789) [jamshale](https://github.com/jamshale) - Move emit events to profile and delay sending until after commit [\#2760](https://github.com/hyperledger/aries-cloudagent-python/pull/2760) [ianco](https://github.com/ianco) - fix: partial revert of ConnRecord schema change 1.0.0 [\#2746](https://github.com/hyperledger/aries-cloudagent-python/pull/2746) [dbluhm](https://github.com/dbluhm) - - chore(deps): Bump aiohttp from 3.9.1 to 3.9.2 dependencies [\#2745](https://github.com/hyperledger/aries-cloudagent-python/pull/2745) [dependabot bot](https://github.com/dependabot bot) + - chore(deps): Bump aiohttp from 3.9.1 to 3.9.2 dependencies [\#2745](https://github.com/hyperledger/aries-cloudagent-python/pull/2745) [dependabot bot](https://github.com/dependabot) - bump pydid to v 0.4.3 [\#2737](https://github.com/hyperledger/aries-cloudagent-python/pull/2737) [PatStLouis](https://github.com/PatStLouis) - Fix subwallet record removal [\#2721](https://github.com/hyperledger/aries-cloudagent-python/pull/2721) [andrewwhitehead](https://github.com/andrewwhitehead) - - chore(deps): Bump jinja2 from 3.1.2 to 3.1.3 dependencies [\#2707](https://github.com/hyperledger/aries-cloudagent-python/pull/2707) [dependabot bot](https://github.com/dependabot bot) + - chore(deps): Bump jinja2 from 3.1.2 to 3.1.3 dependencies [\#2707](https://github.com/hyperledger/aries-cloudagent-python/pull/2707) [dependabot bot](https://github.com/dependabot) - feat: inject profile [\#2705](https://github.com/hyperledger/aries-cloudagent-python/pull/2705) [dbluhm](https://github.com/dbluhm) - Remove tiny-vim from being added to the container image to reduce reported vulnerabilities from scanning [\#2699](https://github.com/hyperledger/aries-cloudagent-python/pull/2699) [swcurran](https://github.com/swcurran) - - chore(deps): Bump jwcrypto from 1.5.0 to 1.5.1 dependencies [\#2689](https://github.com/hyperledger/aries-cloudagent-python/pull/2689) [dependabot bot](https://github.com/dependabot bot) + - chore(deps): Bump jwcrypto from 1.5.0 to 1.5.1 dependencies [\#2689](https://github.com/hyperledger/aries-cloudagent-python/pull/2689) [dependabot bot](https://github.com/dependabot) - Update dependencies [\#2686](https://github.com/hyperledger/aries-cloudagent-python/pull/2686) [andrewwhitehead](https://github.com/andrewwhitehead) - Fix: Change To Use Timezone Aware UTC datetime [\#2679](https://github.com/hyperledger/aries-cloudagent-python/pull/2679) [Ennovate-com](https://github.com/Ennovate-com) - fix: update broken demo 
dependency [\#2638](https://github.com/hyperledger/aries-cloudagent-python/pull/2638) [mrkaurelius](https://github.com/mrkaurelius) - - Bump cryptography from 41.0.5 to 41.0.6 dependencies [\#2636](https://github.com/hyperledger/aries-cloudagent-python/pull/2636) [dependabot bot](https://github.com/dependabot bot) - - Bump aiohttp from 3.8.6 to 3.9.0 dependencies [\#2635](https://github.com/hyperledger/aries-cloudagent-python/pull/2635) [dependabot bot](https://github.com/dependabot bot) + - Bump cryptography from 41.0.5 to 41.0.6 dependencies [\#2636](https://github.com/hyperledger/aries-cloudagent-python/pull/2636) [dependabot bot](https://github.com/dependabot) + - Bump aiohttp from 3.8.6 to 3.9.0 dependencies [\#2635](https://github.com/hyperledger/aries-cloudagent-python/pull/2635) [dependabot bot](https://github.com/dependabot) - CI/CD, Testing, and Developer Tools/Productivity Updates - Fix deprecation warnings [\#2756](https://github.com/hyperledger/aries-cloudagent-python/pull/2756) [ff137](https://github.com/ff137) - - chore(deps): Bump the all-actions group with 10 updates dependencies [\#2784](https://github.com/hyperledger/aries-cloudagent-python/pull/2784) [dependabot bot](https://github.com/dependabot bot) + - chore(deps): Bump the all-actions group with 10 updates dependencies [\#2784](https://github.com/hyperledger/aries-cloudagent-python/pull/2784) [dependabot bot](https://github.com/dependabot) - Add Dependabot configuration [\#2783](https://github.com/hyperledger/aries-cloudagent-python/pull/2783) [WadeBarnes](https://github.com/WadeBarnes) - Implement B006 rule [\#2775](https://github.com/hyperledger/aries-cloudagent-python/pull/2775) [jamshale](https://github.com/jamshale) - ⬆️ Upgrade pytest to 8.0 [\#2773](https://github.com/hyperledger/aries-cloudagent-python/pull/2773) [ff137](https://github.com/ff137) @@ -134,6 +149,7 @@ There are no breaking changes in 0.12.0rc1. - Update snyk workflow to execute on Pull Request [\#2658](https://github.com/hyperledger/aries-cloudagent-python/pull/2658) [usingtechnology](https://github.com/usingtechnology) - Release management pull requests + - 0.12.0rc2 [\#2825](https://github.com/hyperledger/aries-cloudagent-python/pull/2825) [swcurran](https://github.com/swcurran) - 0.12.0rc1 [\#2800](https://github.com/hyperledger/aries-cloudagent-python/pull/2800) [swcurran](https://github.com/swcurran) - 0.12.0rc1 [\#2799](https://github.com/hyperledger/aries-cloudagent-python/pull/2799) [swcurran](https://github.com/swcurran) - 0.12.0rc0 [\#2732](https://github.com/hyperledger/aries-cloudagent-python/pull/2732) [swcurran](https://github.com/swcurran) @@ -264,11 +280,11 @@ not be answered for a long time, and the holder responds after the delete. 
See - Dependencies and Internal Updates - chore: bump pydid version [\#2626](https://github.com/hyperledger/aries-cloudagent-python/pull/2626) [dbluhm](https://github.com/dbluhm) - chore: dependency updates [\#2565](https://github.com/hyperledger/aries-cloudagent-python/pull/2565) [dbluhm](https://github.com/dbluhm) - - chore(deps): Bump urllib3 from 2.0.6 to 2.0.7 dependencies [\#2552](https://github.com/hyperledger/aries-cloudagent-python/pull/2552) [dependabot bot](https://github.com/dependabot bot) - - chore(deps): Bump urllib3 from 2.0.6 to 2.0.7 in /demo/playground/scripts dependencies [\#2551](https://github.com/hyperledger/aries-cloudagent-python/pull/2551) [dependabot bot](https://github.com/dependabot bot) + - chore(deps): Bump urllib3 from 2.0.6 to 2.0.7 dependencies [\#2552](https://github.com/hyperledger/aries-cloudagent-python/pull/2552) [dependabot bot](https://github.com/dependabot) + - chore(deps): Bump urllib3 from 2.0.6 to 2.0.7 in /demo/playground/scripts dependencies [\#2551](https://github.com/hyperledger/aries-cloudagent-python/pull/2551) [dependabot bot](https://github.com/dependabot) - chore: update pydid [\#2527](https://github.com/hyperledger/aries-cloudagent-python/pull/2527) [dbluhm](https://github.com/dbluhm) - - chore(deps): Bump urllib3 from 2.0.5 to 2.0.6 dependencies [\#2525](https://github.com/hyperledger/aries-cloudagent-python/pull/2525) [dependabot bot](https://github.com/dependabot bot) - - chore(deps): Bump urllib3 from 2.0.2 to 2.0.6 in /demo/playground/scripts dependencies [\#2524](https://github.com/hyperledger/aries-cloudagent-python/pull/2524) [dependabot bot](https://github.com/dependabot bot) + - chore(deps): Bump urllib3 from 2.0.5 to 2.0.6 dependencies [\#2525](https://github.com/hyperledger/aries-cloudagent-python/pull/2525) [dependabot bot](https://github.com/dependabot) + - chore(deps): Bump urllib3 from 2.0.2 to 2.0.6 in /demo/playground/scripts dependencies [\#2524](https://github.com/hyperledger/aries-cloudagent-python/pull/2524) [dependabot bot](https://github.com/dependabot) - Avoid multiple open wallet connections [\#2521](https://github.com/hyperledger/aries-cloudagent-python/pull/2521) [andrewwhitehead](https://github.com/andrewwhitehead) - Remove unused dependencies [\#2510](https://github.com/hyperledger/aries-cloudagent-python/pull/2510) [andrewwhitehead](https://github.com/andrewwhitehead) - Use correct rust log level in dockerfiles [\#2499](https://github.com/hyperledger/aries-cloudagent-python/pull/2499) [loneil](https://github.com/loneil) @@ -320,7 +336,7 @@ details in the PR and [Issue \#2531 Routing for agents behind a aca-py based med Thanks to [codespree](https://github.com/codespree) for raising the issue and providing the fix. [Aries Framework Kotlin](https://github.com/hyperledger/aries-framework-kotlin) -[Issue \#2531 Routing for agents behind a aca-py based mediator is broken]: https://github.com/hyperledger/aries-cloudagent-python/issue/2531 +[Issue \#2531 Routing for agents behind a aca-py based mediator is broken]: [\#2531](https://github.com/hyperledger/aries-cloudagent-python/issue/2531) #### 0.10.4 Categorized List of Pull Requests @@ -487,7 +503,7 @@ deleted from ACA-Py storage. 
- Corrected typo on mediator invitation configuration argument [\#2365](https://github.com/hyperledger/aries-cloudagent-python/pull/2365) [jorgefl0](https://github.com/jorgefl0) - Add workaround for ARM based macs [\#2313](https://github.com/hyperledger/aries-cloudagent-python/pull/2313) [finnformica](https://github.com/finnformica) - Dependencies and Internal Updates - - chore(deps): Bump certifi from 2023.5.7 to 2023.7.22 in /demo/playground/scripts dependencies [\#2354](https://github.com/hyperledger/aries-cloudagent-python/pull/2354) [dependabot bot](https://github.com/dependabot bot) + - chore(deps): Bump certifi from 2023.5.7 to 2023.7.22 in /demo/playground/scripts dependencies [\#2354](https://github.com/hyperledger/aries-cloudagent-python/pull/2354) [dependabot bot](https://github.com/dependabot) - CI/CD and Developer Tools/Productivity Updates - Fix for nightly tests failing on Python 3.10 [\#2435](https://github.com/hyperledger/aries-cloudagent-python/pull/2435) [Gavinok](https://github.com/Gavinok) - Don't run Snyk on forks [\#2429](https://github.com/hyperledger/aries-cloudagent-python/pull/2429) [ryjones](https://github.com/ryjones) @@ -610,7 +626,7 @@ about changes you might need to make to your deployment. - upgrade requests to latest [\#2336](https://github.com/hyperledger/aries-cloudagent-python/pull/2336) [ff137](https://github.com/ff137) - upgrade packaging to latest [\#2334](https://github.com/hyperledger/aries-cloudagent-python/pull/2334) [ff137](https://github.com/ff137) - chore: update PyYAML [\#2329](https://github.com/hyperledger/aries-cloudagent-python/pull/2329) [dbluhm](https://github.com/dbluhm) - - chore(deps): Bump aiohttp from 3.8.4 to 3.8.5 in /demo/playground/scripts dependencies [\#2325](https://github.com/hyperledger/aries-cloudagent-python/pull/2325) [dependabot bot](https://github.com/dependabot bot) + - chore(deps): Bump aiohttp from 3.8.4 to 3.8.5 in /demo/playground/scripts dependencies [\#2325](https://github.com/hyperledger/aries-cloudagent-python/pull/2325) [dependabot bot](https://github.com/dependabot) - ⬆️ upgrade marshmallow to latest [\#2322](https://github.com/hyperledger/aries-cloudagent-python/pull/2322) [ff137](https://github.com/ff137) - fix: use python 3.9 in run_docker [\#2291](https://github.com/hyperledger/aries-cloudagent-python/pull/2291) [dbluhm](https://github.com/dbluhm) - BREAKING!: drop python 3.6 support [\#2247](https://github.com/hyperledger/aries-cloudagent-python/pull/2247) [dbluhm](https://github.com/dbluhm) @@ -733,7 +749,7 @@ We have also noted that in some container orchestration environments such as install correctly in other environments (such as in `docker compose` setups). 
[\#2116]: https://github.com/hyperledger/aries-cloudagent-python/issues/2116 -[Upgrading ACA-Py]: ./UpgradingACA-Py.md +[Upgrading ACA-Py]: docs/deploying/UpgradingACA-Py.md [Issue #2201]: https://github.com/hyperledger/aries-cloudagent-python/issues/2201 [Aries Askar]: https://github.com/hyperledger/aries-askar [Red Hat's OpenShift]: https://www.openshift.com/products/container-platform/ @@ -876,7 +892,7 @@ ACA-Py, adding you wallet settings: #### Categorized List of Pull Requests - Verifiable credential, presentation and revocation handling updates - - **BREAKING:** Update webhook message to terse form [default, added startup flag --debug-webhooks for full form [\#2145](https://github.com/hyperledger/aries-cloudagent-python/pull/2145) by [victorlee0505](victorlee0505) + - **BREAKING:** Update webhook message to terse form [default, added startup flag --debug-webhooks for full form [\#2145](https://github.com/hyperledger/aries-cloudagent-python/pull/2145) by [victorlee0505](https://github.com/victorlee0505) - Add startup flag --light-weight-webhook to trim down outbound webhook payload [\#1941](https://github.com/hyperledger/aries-cloudagent-python/pull/1941) [victorlee0505](https://github.com/victorlee0505) - feat: add verification method issue-credentials-2.0/send endpoint [\#2135](https://github.com/hyperledger/aries-cloudagent-python/pull/2135) [chumbert](https://github.com/chumbert) - Respect auto-verify-presentation flag in present proof v1 and v2 [\#2097](https://github.com/hyperledger/aries-cloudagent-python/pull/2097) [dbluhm](https://github.com/dbluhm) @@ -940,7 +956,7 @@ ACA-Py, adding you wallet settings: - Delete tail files [\#2103](https://github.com/hyperledger/aries-cloudagent-python/pull/2103) [ramreddychalla94](https://github.com/ramreddychalla94) - Startup Command Line / Environment / YAML Parameter Updates - - Update webhook message to terse form [default, added startup flag --debug-webhooks for full form [\#2145](https://github.com/hyperledger/aries-cloudagent-python/pull/2145) by [victorlee0505](victorlee0505) + - Update webhook message to terse form [default, added startup flag --debug-webhooks for full form [\#2145](https://github.com/hyperledger/aries-cloudagent-python/pull/2145) by [victorlee0505](https://github.com/victorlee0505) - Add startup flag --light-weight-webhook to trim down outbound webhook payload [\#1941](https://github.com/hyperledger/aries-cloudagent-python/pull/1941) [victorlee0505](https://github.com/victorlee0505) - Add missing --mediator-connections-invite cmd arg info to docs [\#2051](https://github.com/hyperledger/aries-cloudagent-python/pull/2051) ([matrixik](https://github.com/matrixik)) - Issue \#2068 boolean flag change to support HEAD requests to default route [\#2077](https://github.com/hyperledger/aries-cloudagent-python/pull/2077) ([johnekent](https://github.com/johnekent)) @@ -967,7 +983,7 @@ ACA-Py, adding you wallet settings: - Fix: SchemasInputDescriptorFilter: broken deserialization renders generated clients unusable [\#1894](https://github.com/hyperledger/aries-cloudagent-python/pull/1894) ([rmnre](https://github.com/rmnre)) - fix: schema class can set Meta.unknown [\#1885](https://github.com/hyperledger/aries-cloudagent-python/pull/1885) ([dbluhm](https://github.com/dbluhm)) -- Unit, Integration, and Aries Agent Test Harness Test updates +- Unit, Integration, and Aries Agent Test Harness Test updates - Additional integration tests for revocation scenarios 
[\#2055](https://github.com/hyperledger/aries-cloudagent-python/pull/2055) ([ianco](https://github.com/ianco)) - Previously flagged in release 1.0.0-rc1 - Fixes a few AATH failures [\#1897](https://github.com/hyperledger/aries-cloudagent-python/pull/1897) ([ianco](https://github.com/ianco)) @@ -998,7 +1014,7 @@ ACA-Py, adding you wallet settings: - [fix] Removes extra comma that prevents swagger from accepting the presentation request [\#2149](https://github.com/hyperledger/aries-cloudagent-python/pull/2149) [swcurran](https://github.com/swcurran) - Initial plugin docs [\#2138](https://github.com/hyperledger/aries-cloudagent-python/pull/2138) [ianco](https://github.com/ianco) - Acme workshop [\#2137](https://github.com/hyperledger/aries-cloudagent-python/pull/2137) [ianco](https://github.com/ianco) - - Fix: Performance Demo [no --revocation] [\#2151](https://github.com/ hyperledger/aries-cloudagent-python/pull/2151) [shaangill025](https://github.com/shaangill025) + - Fix: Performance Demo [no --revocation] [\#2151](https://github.com/hyperledger/aries-cloudagent-python/pull/2151) [shaangill025](https://github.com/shaangill025) - Fix typos in alice-local.sh & faber-local.sh [\#2010](https://github.com/hyperledger/aries-cloudagent-python/pull/2010) ([naonishijima](https://github.com/naonishijima)) - Added a bit about manually creating a revoc reg tails file [\#2012](https://github.com/hyperledger/aries-cloudagent-python/pull/2012) ([ianco](https://github.com/ianco)) - Add ability to set docker container name [\#2024](https://github.com/hyperledger/aries-cloudagent-python/pull/2024) ([matrixik](https://github.com/matrixik)) @@ -1296,7 +1312,7 @@ AIP 2.0 RFCs for which support was added. The following is an annotated list of PRs in the release, including a link to each PR. - **AIP 2.0 Features** - - Discover Features Protocol: v1_0 refactoring and v2_0 implementation [[#1500](https://github.com/hyperledger/aries-cloudagent-python/pull/1500)](https://github.com/hyperledger/aries-cloudagent-python/pull/1500) + - Discover Features Protocol: v1_0 refactoring and v2_0 implementation [#1500](https://github.com/hyperledger/aries-cloudagent-python/pull/1500) - Updates the Discover Features 1.0 (AIP 1.0) implementation and implements the new 2.0 version. In doing so, adds generalized support for goal codes to ACA-Py. - fix DiscoveryExchangeRecord RECORD_TOPIC typo fix [#1566](https://github.com/hyperledger/aries-cloudagent-python/pull/1566) - Implement Revocation Notification v1.0 [#1464](https://github.com/hyperledger/aries-cloudagent-python/pull/1464) @@ -1368,7 +1384,7 @@ With usage in the field increasing, we're cleaning up edge cases and issues rela The most significant new feature for users of Indy ledgers is a simplified approach for transaction authors getting their transactions signed by an endorser. Transaction author controllers now do almost nothing other than configuring their instance to use an Endorser, -and ACA-Py takes care of the rest. Documentation of that feature is [here](Endorser.md). +and ACA-Py takes care of the rest. Documentation of that feature is [here](docs/features/Endorser.md). 
- Improve cloud native deployments/scaling - unprotect liveness and readiness endpoints [#1416](https://github.com/hyperledger/aries-cloudagent-python/pull/1416) @@ -1451,7 +1467,6 @@ Includes some cleanups of JSON-LD Verifiable Credentials and Verifiable Presenta - fix: error on deserializing conn record with protocol ([#1325](https://github.com/hyperledger/aries-cloudagent-python/pull/1325)) - fix: failure to verify jsonld on non-conformant doc but vaild vmethod ([#1301](https://github.com/hyperledger/aries-cloudagent-python/pull/1301)) - fix: allow underscore in endpoints ([#1378](https://github.com/hyperledger/aries-cloudagent-python/pull/1378)) - ## 0.7.0 @@ -1481,11 +1496,11 @@ This is a significant release of ACA-Py with several new features, as well as ch #### Mediator support -While ACA-Py had previous support for a basic routing protocol, this was never fully developed or used in practice. Starting with this release, inbound and outbound connections can be established through a mediator agent using the Aries (Mediator Coordination Protocol)[https://github.com/hyperledger/aries-rfcs/tree/master/features/0211-route-coordination]. This work was initially contributed by Adam Burdett and Daniel Bluhm of [Indicio](https://indicio.tech/) on behalf of [SICPA](https://sicpa.com/). [Read more about mediation support](./Mediation.md). +While ACA-Py had previous support for a basic routing protocol, this was never fully developed or used in practice. Starting with this release, inbound and outbound connections can be established through a mediator agent using the Aries [Mediator Coordination Protocol](https://github.com/hyperledger/aries-rfcs/tree/master/features/0211-route-coordination). This work was initially contributed by Adam Burdett and Daniel Bluhm of [Indicio](https://indicio.tech/) on behalf of [SICPA](https://sicpa.com/). [Read more about mediation support](docs/features/Mediation.md). #### Multi-Tenancy support -Started by [BMW](https://bmw.com/) and completed by [Animo Solutions](https://animo.id/) and [Anon Solutions](https://anon-solutions.ca/) on behalf of [SICPA](https://sicpa.com/), this feature allows for a single ACA-Py instance to host multiple wallet instances. This can greatly reduce the resources required when many identities are being handled. [Read more about multi-tenancy support](./Multitenancy.md). +Started by [BMW](https://bmw.com/) and completed by [Animo Solutions](https://animo.id/) and [Anon Solutions](https://anon-solutions.ca/) on behalf of [SICPA](https://sicpa.com/), this feature allows for a single ACA-Py instance to host multiple wallet instances. This can greatly reduce the resources required when many identities are being handled. [Read more about multi-tenancy support](docs/features/Multitenancy.md). #### New connection protocol(s) diff --git a/docs/demo/AliceWantsAJsonCredential.md b/docs/demo/AliceWantsAJsonCredential.md index 6001c8b12e..5a7ed8f8ff 100644 --- a/docs/demo/AliceWantsAJsonCredential.md +++ b/docs/demo/AliceWantsAJsonCredential.md @@ -2,7 +2,7 @@ ACA-Py has the capability to issue and verify both Indy and JSON-LD (W3C compliant) credentials. -The JSON-LD support is documented [here](../JsonLdCredentials.md) - this document will provide some additional detail in how to use the demo and admin api to issue and prove JSON-LD credentials. 
+The JSON-LD support is documented [here](../features/JsonLdCredentials.md) - this document will provide some additional detail in how to use the demo and admin api to issue and prove JSON-LD credentials. ## Setup Agents to Issue JSON-LD Credentials diff --git a/docs/demo/AriesOpenAPIDemo.md b/docs/demo/AriesOpenAPIDemo.md index deed6abb5b..b43bb72737 100644 --- a/docs/demo/AriesOpenAPIDemo.md +++ b/docs/demo/AriesOpenAPIDemo.md @@ -581,7 +581,7 @@ Those that know something about the Indy process for issuing a credential and th If you would like to perform all of the issuance steps manually on the Faber agent side, use a sequence of the other `/issue-credential-2.0/` messages. Use the **`GET /issue-credential-2.0/records`** to both check the credential exchange state as you progress through the protocol and to find some of the data you’ll need in executing the sequence of requests. -The following table lists endpoints that you need to call ("REST service") and callbacks that your agent will receive ("callback") that your need to respond to. See the [detailed API docs](../features/AdminAPI). +The following table lists endpoints that you need to call ("REST service") and callbacks that your agent will receive ("callback") that your need to respond to. See the [detailed API docs](../features/AdminAPI.md). | Protocol Step | Faber (Issuer) | Alice (Holder) | Notes | | -------------------- | ---------------------- | ------------------ | ----- | @@ -700,7 +700,7 @@ As with the issue credential process, the agents handled some of the presentatio If you would like to perform all of the proof request/response steps manually, you can call all of the individual `/present-proof-2.0` messages. -The following table lists endpoints that you need to call ("REST service") and callbacks that your agent will receive ("callback") that you need to respond to. See the [detailed API docs](../features/AdminAPI). +The following table lists endpoints that you need to call ("REST service") and callbacks that your agent will receive ("callback") that you need to respond to. See the [detailed API docs](../features/AdminAPI.md). | Protocol Step | Faber (Verifier) | Alice (Holder/Prover) | Notes | | -------------------- | ---------------------- | ------------------------- | ----- | diff --git a/docs/features/DIDResolution.md b/docs/features/DIDResolution.md index 30e8f9210a..6d5e9fc75e 100644 --- a/docs/features/DIDResolution.md +++ b/docs/features/DIDResolution.md @@ -176,7 +176,7 @@ plugin: The following is a fully functional Dockerfile encapsulating this setup: ```dockerfile= -FROM ghcr.io/hyperledger/aries-cloudagent-python:py3.9-0.12.0rc1 +FROM ghcr.io/hyperledger/aries-cloudagent-python:py3.9-0.12.0rc2 RUN pip3 install git+https://github.com/dbluhm/acapy-resolver-github CMD ["aca-py", "start", "-it", "http", "0.0.0.0", "3000", "-ot", "http", "-e", "http://localhost:3000", "--admin", "0.0.0.0", "3001", "--admin-insecure-mode", "--no-ledger", "--plugin", "acapy_resolver_github"] diff --git a/docs/features/Multitenancy.md b/docs/features/Multitenancy.md index 07b63c32b8..c85756068a 100644 --- a/docs/features/Multitenancy.md +++ b/docs/features/Multitenancy.md @@ -43,7 +43,7 @@ The wallets used by the different tenants are called **sub wallets**. A sub wall The **base wallet** however, takes on a different role and has limited functionality. Its main function is to manage the sub wallets, which can be done using the [Multi-tenant Admin API](#multi-tenant-admin-api). 
It stores all settings and information about the different sub wallets and will route incoming messages to the corresponding sub wallets. See [Message Routing](#message-routing) for more details. All other features are disabled for the base wallet. This means it cannot issue credentials, present proof, or do any of the other actions sub wallets can do. This is to keep a clear hierarchical difference between base and sub wallets. For this reason, the base wallet should generally _not_ be provisioned using the `--wallet-seed` argument as not only it is not necessary for sub wallet management operations, but it will also require this DID to be correctly registered on the ledger for the service to start-up correctly. -![Multi-tenancy Architecture](/docs/assets/multitenancyDiagram.png) +![Multi-tenancy Architecture](../assets/multitenancyDiagram.png) ### Usage @@ -301,7 +301,7 @@ For deterministic JWT creation and verification between restarts and multiple in When using the SwaggerUI you can click the :lock: icon next to each of the endpoints or the `Authorize` button at the top to set the correct authentication headers. Make sure to also include the `Bearer` part in the input field. This won't be automatically added. -![API Authentication](/docs/assets/adminApiAuthentication.png) +![API Authentication](../assets/adminApiAuthentication.png) ## Tenant Management diff --git a/docs/features/SupportedRFCs.md b/docs/features/SupportedRFCs.md index 022999fcf1..7bf280f27e 100644 --- a/docs/features/SupportedRFCs.md +++ b/docs/features/SupportedRFCs.md @@ -8,7 +8,7 @@ ACA-Py or the repository `main` branch. Reminders (and PRs!) to update this page welcome! If you have any questions, please contact us on the #aries channel on [Hyperledger Discord](https://discord.gg/hyperledger) or through an issue in this repo. -**Last Update**: 2024-02-17, Release 0.12.0rc1 +**Last Update**: 2024-03-05, Release 0.12.0rc2 > The checklist version of this document was created as a joint effort > between [Northern Block](https://northernblock.io/), [Animo Solutions](https://animo.id/) and the Ontario government, on behalf of the Ontario government. diff --git a/docs/features/devcontainer.md b/docs/features/devcontainer.md index 719d0ceae5..316b1fd60e 100644 --- a/docs/features/devcontainer.md +++ b/docs/features/devcontainer.md @@ -5,7 +5,7 @@ We provide a [`devcontainer`](https://containers.dev) and will use [`VS Code`](h By no means is ACA-Py limited to these tools; they are merely examples. -**For information on running demos and tests using provided shell scripts, see [DevReadMe](./DevReadMe) readme.** +**For information on running demos and tests using provided shell scripts, see [DevReadMe](./DevReadMe.md) readme.** ## Caveats diff --git a/docs/generated/aries_cloudagent.utils.rst b/docs/generated/aries_cloudagent.utils.rst index b551a25674..b231555d64 100644 --- a/docs/generated/aries_cloudagent.utils.rst +++ b/docs/generated/aries_cloudagent.utils.rst @@ -33,6 +33,14 @@ aries\_cloudagent.utils.dependencies module :undoc-members: :show-inheritance: +aries\_cloudagent.utils.endorsement\_setup module +------------------------------------------------- + +.. 
automodule:: aries_cloudagent.utils.endorsement_setup + :members: + :undoc-members: + :show-inheritance: + aries\_cloudagent.utils.env module ---------------------------------- diff --git a/mkdocs.yml b/mkdocs.yml index 1c9a9b0667..cc32631bc4 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -49,8 +49,8 @@ markdown_extensions: - pymdownx.caret - pymdownx.details - pymdownx.emoji: - emoji_generator: !!python/name:materialx.emoji.to_svg - emoji_index: !!python/name:materialx.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + emoji_index: !!python/name:material.extensions.emoji.twemoji - pymdownx.highlight: anchor_linenums: true - pymdownx.inlinehilite diff --git a/open-api/openapi.json b/open-api/openapi.json index c83a1fe881..0a7fdf6522 100644 --- a/open-api/openapi.json +++ b/open-api/openapi.json @@ -2,7 +2,7 @@ "openapi" : "3.0.1", "info" : { "title" : "Aries Cloud Agent", - "version" : "v0.12.0rc1" + "version" : "v0.12.0rc2" }, "servers" : [ { "url" : "/" @@ -376,7 +376,7 @@ "in" : "query", "name" : "state", "schema" : { - "enum" : [ "abandoned", "invitation", "active", "response", "request", "start", "error", "completed", "init" ], + "enum" : [ "abandoned", "invitation", "response", "active", "start", "request", "error", "init", "completed" ], "type" : "string" } }, { diff --git a/open-api/swagger.json b/open-api/swagger.json index a1982cf310..6e07a870f8 100644 --- a/open-api/swagger.json +++ b/open-api/swagger.json @@ -1,7 +1,7 @@ { "swagger" : "2.0", "info" : { - "version" : "v0.12.0rc1", + "version" : "v0.12.0rc2", "title" : "Aries Cloud Agent" }, "tags" : [ { @@ -339,7 +339,7 @@ "description" : "Connection state", "required" : false, "type" : "string", - "enum" : [ "abandoned", "invitation", "active", "response", "request", "start", "error", "completed", "init" ] + "enum" : [ "abandoned", "invitation", "response", "active", "start", "request", "error", "init", "completed" ] }, { "name" : "their_did", "in" : "query", diff --git a/pyproject.toml b/pyproject.toml index 0f7e9242ff..32b25ae445 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aries_cloudagent" -version = "0.12.0rc1" +version = "0.12.0rc2" description = "Hyperledger Aries Cloud Agent Python (ACA-Py) is a foundation for building decentralized identity applications and services running in non-mobile environments. 
" authors = ["Hyperledger Aries "] license = "Apache-2.0" From 8bf8da2126aa246d09c72bf6eaf2a455c2ebb406 Mon Sep 17 00:00:00 2001 From: jamshale Date: Wed, 6 Mar 2024 18:30:32 +0000 Subject: [PATCH 62/69] Integration tests - Add retry to did registration Signed-off-by: jamshale --- demo/bdd_support/agent_backchannel_client.py | 3 ++- demo/features/steps/0586-sign-transaction.py | 18 ++++++++----- demo/runners/agent_container.py | 28 +++++++++++--------- demo/runners/support/agent.py | 23 ++++++++-------- 4 files changed, 41 insertions(+), 31 deletions(-) diff --git a/demo/bdd_support/agent_backchannel_client.py b/demo/bdd_support/agent_backchannel_client.py index 7dd2c24828..5d116e2fea 100644 --- a/demo/bdd_support/agent_backchannel_client.py +++ b/demo/bdd_support/agent_backchannel_client.py @@ -4,7 +4,6 @@ from runners.agent_container import AgentContainer, create_agent_with_args_list - ###################################################################### # coroutine utilities ###################################################################### @@ -246,6 +245,7 @@ def agent_container_POST( data: dict = None, text: bool = False, params: dict = None, + raise_error: bool = True, ) -> dict: return run_coroutine( the_container.admin_POST, @@ -253,6 +253,7 @@ def agent_container_POST( data=data, text=text, params=params, + raise_error=raise_error, ) diff --git a/demo/features/steps/0586-sign-transaction.py b/demo/features/steps/0586-sign-transaction.py index e0786afc38..25a63e46c9 100644 --- a/demo/features/steps/0586-sign-transaction.py +++ b/demo/features/steps/0586-sign-transaction.py @@ -32,7 +32,7 @@ def step_impl(context, agent_name, did_role): created_did = agent_container_POST(agent["agent"], "/wallet/did/create") # publish to the ledger with did_role - registered_did = agent_container_register_did( + agent_container_register_did( agent["agent"], created_did["result"]["did"], created_did["result"]["verkey"], @@ -40,11 +40,17 @@ def step_impl(context, agent_name, did_role): ) # make the new did the wallet's public did - published_did = agent_container_POST( - agent["agent"], - "/wallet/did/public", - params={"did": created_did["result"]["did"]}, - ) + retries = 3 + for retry in range(retries): + published_did = agent_container_POST( + agent["agent"], + "/wallet/did/public", + params={"did": created_did["result"]["did"]}, + raise_error=retries - 1 == retry, + ) + if "result" in published_did or "txn" in published_did: + break + if "result" in published_did: # published right away! 
pass diff --git a/demo/runners/agent_container.py b/demo/runners/agent_container.py index b2db5ed138..23afb3f49a 100644 --- a/demo/runners/agent_container.py +++ b/demo/runners/agent_container.py @@ -7,26 +7,25 @@ import sys import time from typing import List -import yaml - -from qrcode import QRCode +import yaml from aiohttp import ClientError +from qrcode import QRCode sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from runners.support.agent import ( # noqa:E402 - DemoAgent, - default_genesis_txns, - start_mediator_agent, - connect_wallet_to_mediator, - start_endorser_agent, - connect_wallet_to_endorser, - WALLET_TYPE_INDY, CRED_FORMAT_INDY, CRED_FORMAT_JSON_LD, DID_METHOD_KEY, KEY_TYPE_BLS, + WALLET_TYPE_INDY, + DemoAgent, + connect_wallet_to_endorser, + connect_wallet_to_mediator, + default_genesis_txns, + start_endorser_agent, + start_mediator_agent, ) from runners.support.utils import ( # noqa:E402 check_requires, @@ -36,7 +35,6 @@ log_timer, ) - CRED_PREVIEW_TYPE = "https://didcomm.org/issue-credential/2.0/credential-preview" SELF_ATTESTED = os.getenv("SELF_ATTESTED") TAILS_FILE_COUNT = int(os.getenv("TAILS_FILE_COUNT", 100)) @@ -1174,7 +1172,9 @@ async def admin_GET(self, path, text=False, params=None) -> dict: """ return await self.agent.admin_GET(path, text=text, params=params) - async def admin_POST(self, path, data=None, text=False, params=None) -> dict: + async def admin_POST( + self, path, data=None, text=False, params=None, raise_error=True + ) -> dict: """Execute an admin POST request in the context of the current wallet. path = /path/of/request @@ -1182,7 +1182,9 @@ async def admin_POST(self, path, data=None, text=False, params=None) -> dict: text = True if the expected response is text, False if json data params = any additional parameters to pass with the request """ - return await self.agent.admin_POST(path, data=data, text=text, params=params) + return await self.agent.admin_POST( + path, data=data, text=text, params=params, raise_error=raise_error + ) async def admin_PATCH(self, path, data=None, text=False, params=None) -> dict: """Execute an admin PATCH request in the context of the current wallet. 
diff --git a/demo/runners/support/agent.py b/demo/runners/support/agent.py index a114ef00a3..5e2e34be86 100644 --- a/demo/runners/support/agent.py +++ b/demo/runners/support/agent.py @@ -1,6 +1,4 @@ import asyncio -from concurrent.futures import ThreadPoolExecutor -import asyncpg import base64 import functools import json @@ -9,17 +7,18 @@ import random import subprocess import sys -import yaml - +from concurrent.futures import ThreadPoolExecutor from timeit import default_timer +import asyncpg +import yaml from aiohttp import ( - web, - ClientSession, + ClientError, ClientRequest, ClientResponse, - ClientError, + ClientSession, ClientTimeout, + web, ) from .utils import flatten, log_json, log_msg, log_timer, output_reader @@ -1045,17 +1044,17 @@ async def handle_problem_report(self, message): ) async def handle_endorse_transaction(self, message): - self.log(f"Received endorse transaction ...\n", source="stderr") + self.log("Received endorse transaction ...\n", source="stderr") async def handle_revocation_registry(self, message): reg_id = message.get("revoc_reg_id", "(undetermined)") self.log(f"Revocation registry: {reg_id} state: {message['state']}") async def handle_mediation(self, message): - self.log(f"Received mediation message ...\n") + self.log("Received mediation message ...\n") async def handle_keylist(self, message): - self.log(f"Received handle_keylist message ...\n") + self.log("Received handle_keylist message ...\n") self.log(json.dumps(message)) async def taa_accept(self): @@ -1167,7 +1166,7 @@ async def agency_admin_POST( raise async def admin_POST( - self, path, data=None, text=False, params=None, headers=None + self, path, data=None, text=False, params=None, headers=None, raise_error=True ) -> ClientResponse: try: EVENT_LOGGER.debug( @@ -1192,6 +1191,8 @@ async def admin_POST( return response except ClientError as e: self.log(f"Error during POST {path}: {str(e)}") + if not raise_error: + return None raise async def admin_PATCH( From a1f5415c56a205df4bd948df1a633833a29ca1c0 Mon Sep 17 00:00:00 2001 From: Orjiene Kenechukwu Date: Thu, 7 Mar 2024 12:33:13 +0100 Subject: [PATCH 63/69] vcdi proposal format integration Signed-off-by: Orjiene Kenechukwu --- .../credential_definitions/vcdi/util.py | 73 +++++++++++++++++++ aries_cloudagent/messaging/valid.py | 70 ++++++++++++++++++ .../v2_0/formats/vc_di/handler.py | 4 +- .../issue_credential/v2_0/message_types.py | 2 +- .../protocols/issue_credential/v2_0/routes.py | 52 +++++++++++++ 5 files changed, 199 insertions(+), 2 deletions(-) create mode 100644 aries_cloudagent/messaging/credential_definitions/vcdi/util.py diff --git a/aries_cloudagent/messaging/credential_definitions/vcdi/util.py b/aries_cloudagent/messaging/credential_definitions/vcdi/util.py new file mode 100644 index 0000000000..c703b7685d --- /dev/null +++ b/aries_cloudagent/messaging/credential_definitions/vcdi/util.py @@ -0,0 +1,73 @@ +"""Credential definition utilities.""" + +import re + +from marshmallow import fields + +from ....core.profile import Profile +from ...models.openapi import OpenAPISchema +from ...valid import ( + VCDI_CRED_DEF_ID_EXAMPLE, + VCDI_CRED_DEF_ID_VALIDATE, + VCDI_DID_EXAMPLE, + VCDI_DID_VALIDATE, + VCDI_SCHEMA_ID_EXAMPLE, + VCDI_SCHEMA_ID_VALIDATE, + VCDI_VERSION_EXAMPLE, + VCDI_VERSION_VALIDATE, +) + +CRED_DEF_SENT_RECORD_TYPE = "cred_def_sent" + + +class VCDICredDefQueryStringSchema(OpenAPISchema): + """Query string parameters for credential definition searches.""" + + schema_id = fields.Str( + required=False, + 
validate=VCDI_SCHEMA_ID_VALIDATE, + metadata={ + "description": "Schema identifier", + "example": VCDI_SCHEMA_ID_EXAMPLE, + }, + ) + schema_issuer_did = fields.Str( + required=False, + validate=VCDI_DID_VALIDATE, + metadata={"description": "Schema issuer DID", "example": VCDI_DID_EXAMPLE}, + ) + schema_name = fields.Str( + required=False, metadata={"description": "Schema name", "example": "membership"} + ) + schema_version = fields.Str( + required=False, + validate=VCDI_VERSION_VALIDATE, + metadata={"description": "Schema version", "example": VCDI_VERSION_EXAMPLE}, + ) + issuer_did = fields.Str( + required=False, + validate=VCDI_DID_VALIDATE, + metadata={"description": "Issuer DID", "example": VCDI_DID_EXAMPLE}, + ) + cred_def_id = fields.Str( + required=False, + validate=VCDI_CRED_DEF_ID_VALIDATE, + metadata={ + "description": "Credential definition id", + "example": VCDI_CRED_DEF_ID_EXAMPLE, + }, + ) + + +CRED_DEF_TAGS = list(vars(VCDICredDefQueryStringSchema).get("_declared_fields", [])) + +CRED_DEF_EVENT_PREFIX = "acapy::CRED_DEF::" +EVENT_LISTENER_PATTERN = re.compile(f"^{CRED_DEF_EVENT_PREFIX}(.*)?$") + + +async def notify_cred_def_event(profile: Profile, cred_def_id: str, meta_data: dict): + """Send notification for a cred def post-process event.""" + await profile.notify( + CRED_DEF_EVENT_PREFIX + cred_def_id, + meta_data, + ) \ No newline at end of file diff --git a/aries_cloudagent/messaging/valid.py b/aries_cloudagent/messaging/valid.py index c2266197ff..0da9c8a1bb 100644 --- a/aries_cloudagent/messaging/valid.py +++ b/aries_cloudagent/messaging/valid.py @@ -348,6 +348,19 @@ def __init__(self): error="Value {input} is not an indy decentralized identifier (DID)", ) +class VcdiDID(Regexp): + """Validate value against vcdi DID.""" + + EXAMPLE = "WgWxqztrNooG92RXvxSTWv" + PATTERN = re.compile(rf"^(did:key:)?[{B58}]{{21,22}}$") + + def __init__(self): + """Initialize the instance.""" + + super().__init__( + VcdiDID.PATTERN, + error="Value {input} is not an vcdi decentralized identifier (DID)", + ) class DIDValidation(Regexp): """Validate value against any valid DID spec.""" @@ -444,6 +457,25 @@ def __init__(self): error="Value {input} is not an indy credential definition identifier", ) +class VcdiCredDefId(Regexp): + """Validate value against vcdi credential definition identifier specification.""" + + EXAMPLE = "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag" + PATTERN = ( + rf"^([{B58}]{{21,22}})" # issuer DID + f":3" # cred def id marker + f":CL" # sig alg + rf":(([1-9][0-9]*)|([{B58}]{{21,22}}:2:.+:[0-9.]+))" # schema txn / id + f":(.+)?$" # tag + ) + + def __init__(self): + """Initialize the instance.""" + + super().__init__( + VcdiCredDefId.PATTERN, + error="Value {input} is not an vcdi credential definition identifier", + ) class IndyVersion(Regexp): """Validate value against indy version specification.""" @@ -459,6 +491,19 @@ def __init__(self): error="Value {input} is not an indy version (use only digits and '.')", ) +class VcdiVersion(Regexp): + """Validate value against vcdi version specification.""" + + EXAMPLE = "0.1" + PATTERN = r"^[0-9.]+$" + + def __init__(self): + """Initialize the instance.""" + + super().__init__( + VcdiVersion.PATTERN, + error="Value {input} is not an vcdi version (use only digits and '.')", + ) class IndySchemaId(Regexp): """Validate value against indy schema identifier specification.""" @@ -474,6 +519,19 @@ def __init__(self): error="Value {input} is not an indy schema identifier", ) +class VcdiSchemaId(Regexp): + """Validate value against vcdi schema 
identifier specification.""" + + EXAMPLE = "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0" + PATTERN = rf"^[{B58}]{{21,22}}:2:.+:[0-9.]+$" + + def __init__(self): + """Initialize the instance.""" + + super().__init__( + VcdiSchemaId.PATTERN, + error="Value {input} is not an vcdi schema identifier", + ) class IndyRevRegId(Regexp): """Validate value against indy revocation registry identifier specification.""" @@ -938,6 +996,9 @@ def __init__( INDY_DID_VALIDATE = IndyDID() INDY_DID_EXAMPLE = IndyDID.EXAMPLE +VCDI_DID_VALIDATE = VcdiDID() +VCDI_DID_EXAMPLE = VcdiDID.EXAMPLE + GENERIC_DID_VALIDATE = MaybeIndyDID() GENERIC_DID_EXAMPLE = MaybeIndyDID.EXAMPLE @@ -947,9 +1008,15 @@ def __init__( INDY_SCHEMA_ID_VALIDATE = IndySchemaId() INDY_SCHEMA_ID_EXAMPLE = IndySchemaId.EXAMPLE +VCDI_SCHEMA_ID_VALIDATE = VcdiSchemaId() +VCDI_SCHEMA_ID_EXAMPLE = VcdiSchemaId.EXAMPLE + INDY_CRED_DEF_ID_VALIDATE = IndyCredDefId() INDY_CRED_DEF_ID_EXAMPLE = IndyCredDefId.EXAMPLE +VCDI_CRED_DEF_ID_VALIDATE = VcdiCredDefId() +VCDI_CRED_DEF_ID_EXAMPLE = VcdiCredDefId.EXAMPLE + INDY_REV_REG_ID_VALIDATE = IndyRevRegId() INDY_REV_REG_ID_EXAMPLE = IndyRevRegId.EXAMPLE @@ -959,6 +1026,9 @@ def __init__( INDY_VERSION_VALIDATE = IndyVersion() INDY_VERSION_EXAMPLE = IndyVersion.EXAMPLE +VCDI_VERSION_VALIDATE = VcdiVersion() +VCDI_VERSION_EXAMPLE = VcdiVersion.EXAMPLE + INDY_PREDICATE_VALIDATE = IndyPredicate() INDY_PREDICATE_EXAMPLE = IndyPredicate.EXAMPLE diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py index 50a8e40979..c43b9c025b 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py @@ -47,6 +47,8 @@ CRED_DEF_SENT_RECORD_TYPE, CredDefQueryStringSchema, ) +from ......messaging.credential_definitions.util import CRED_DEF_SENT_RECORD_TYPE +from ......messaging.credential_definitions.vcdi.util import VCDICredDefQueryStringSchema from ......messaging.decorators.attach_decorator import AttachDecorator from ......multitenant.base import BaseMultitenantManager from ......revocation_anoncreds.models.issuer_cred_rev_record import IssuerCredRevRecord @@ -94,7 +96,7 @@ def validate_fields(cls, message_type: str, attachment_data: Mapping): """ mapping = { - CRED_20_PROPOSAL: CredDefQueryStringSchema, + CRED_20_PROPOSAL: VCDICredDefQueryStringSchema, CRED_20_OFFER: VCDICredAbstractSchema, CRED_20_REQUEST: VCDICredRequestSchema, CRED_20_ISSUE: VerifiableCredentialSchema, diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py b/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py index b2b9e4325e..8bfe0615ea 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py @@ -41,7 +41,7 @@ CRED_20_PROPOSAL: { V20CredFormat.Format.INDY.api: "hlindy/cred-filter@v2.0", V20CredFormat.Format.LD_PROOF.api: "aries/ld-proof-vc-detail@v1.0", - V20CredFormat.Format.VC_DI.api: "hlindy/cred-filter@v2.0", + V20CredFormat.Format.VC_DI.api: "didcomm/w3c-di-vc-offer@v0.1", }, CRED_20_OFFER: { V20CredFormat.Format.INDY.api: "hlindy/cred-abstract@v2.0", diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/routes.py b/aries_cloudagent/protocols/issue_credential/v2_0/routes.py index 1341ae4dc2..df57353bd1 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/routes.py +++ 
b/aries_cloudagent/protocols/issue_credential/v2_0/routes.py @@ -34,6 +34,14 @@ INDY_SCHEMA_ID_VALIDATE, INDY_VERSION_EXAMPLE, INDY_VERSION_VALIDATE, + VCDI_CRED_DEF_ID_EXAMPLE, + VCDI_CRED_DEF_ID_VALIDATE, + VCDI_DID_EXAMPLE, + VCDI_DID_VALIDATE, + VCDI_SCHEMA_ID_EXAMPLE, + VCDI_SCHEMA_ID_VALIDATE, + VCDI_VERSION_EXAMPLE, + VCDI_VERSION_VALIDATE, UUID4_EXAMPLE, UUID4_VALIDATE, ) @@ -168,6 +176,45 @@ class V20CredFilterIndySchema(OpenAPISchema): metadata={"description": "Credential issuer DID", "example": INDY_DID_EXAMPLE}, ) +class V20CredFilterVCDISchema(OpenAPISchema): + """VCDI credential filtration criteria.""" + + cred_def_id = fields.Str( + required=False, + validate=VCDI_CRED_DEF_ID_VALIDATE, + metadata={ + "description": "Credential definition identifier", + "example": VCDI_CRED_DEF_ID_EXAMPLE, + }, + ) + schema_id = fields.Str( + required=False, + validate=VCDI_SCHEMA_ID_VALIDATE, + metadata={ + "description": "Schema identifier", + "example": VCDI_SCHEMA_ID_EXAMPLE, + }, + ) + schema_issuer_did = fields.Str( + required=False, + validate=VCDI_DID_VALIDATE, + metadata={"description": "Schema issuer DID", "example": VCDI_DID_EXAMPLE}, + ) + schema_name = fields.Str( + required=False, + metadata={"description": "Schema name", "example": "preferences"}, + ) + schema_version = fields.Str( + required=False, + validate=VCDI_VERSION_VALIDATE, + metadata={"description": "Schema version", "example": VCDI_VERSION_EXAMPLE}, + ) + issuer_did = fields.Str( + required=False, + validate=VCDI_DID_VALIDATE, + metadata={"description": "Credential issuer DID", "example": VCDI_DID_EXAMPLE}, + ) + class V20CredFilterSchema(OpenAPISchema): """Credential filtration criteria.""" @@ -182,6 +229,11 @@ class V20CredFilterSchema(OpenAPISchema): required=False, metadata={"description": "Credential filter for linked data proof"}, ) + ld_proof = fields.Nested( + V20CredFilterVCDISchema, + required=False, + metadata={"description": "Credential filter for linked data proof"}, + ) @validates_schema def validate_fields(self, data, **kwargs): From 9d5dbc384b3895e0e0399f4d72b621a15f1c00eb Mon Sep 17 00:00:00 2001 From: Orjiene Kenechukwu Date: Thu, 7 Mar 2024 12:56:40 +0100 Subject: [PATCH 64/69] passing the test Signed-off-by: Orjiene Kenechukwu --- .../issue_credential/v2_0/formats/vc_di/handler.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py index c43b9c025b..c2232636ae 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py @@ -3,10 +3,6 @@ import json import logging from typing import Mapping, Tuple -from aries_cloudagent.protocols.issue_credential.v2_0.manager import ( - V20CredManager, - V20CredManagerError, -) from aries_cloudagent.protocols.issue_credential.v2_0.models.detail.vc_di import ( V20CredExRecordVCDI, ) @@ -25,15 +21,12 @@ from ......anoncreds.issuer import ( AnonCredsIssuer, ) -from ......indy.models.cred import IndyCredentialSchema from ......indy.models.cred_abstract import ( - IndyCredAbstractSchema, VCDICredAbstract, VCDICredAbstractSchema, ) from ......indy.models.cred_request import ( BindingProof, - IndyCredRequestSchema, VCDICredRequest, VCDICredRequestSchema, ) @@ -43,10 +36,6 @@ GET_CRED_DEF, IndyLedgerRequestsExecutor, ) -from ......messaging.credential_definitions.util import ( - CRED_DEF_SENT_RECORD_TYPE, - 
CredDefQueryStringSchema, -) from ......messaging.credential_definitions.util import CRED_DEF_SENT_RECORD_TYPE from ......messaging.credential_definitions.vcdi.util import VCDICredDefQueryStringSchema from ......messaging.decorators.attach_decorator import AttachDecorator From f740b3b60f549df5a466b35842c2d49dcf4e143e Mon Sep 17 00:00:00 2001 From: Orjiene Kenechukwu Date: Thu, 7 Mar 2024 13:01:40 +0100 Subject: [PATCH 65/69] vcdi proposal format integration Signed-off-by: Orjiene Kenechukwu --- .../protocols/issue_credential/v2_0/routes.py | 46 ------------------- 1 file changed, 46 deletions(-) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/routes.py b/aries_cloudagent/protocols/issue_credential/v2_0/routes.py index b68533f798..bf522b0f59 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/routes.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/routes.py @@ -219,47 +219,6 @@ class V20CredFilterVCDISchema(OpenAPISchema): metadata={"description": "Credential issuer DID", "example": VCDI_DID_EXAMPLE}, ) - -class V20CredFilterVCDISchema(OpenAPISchema): - """VCDI credential filtration criteria.""" - - cred_def_id = fields.Str( - required=False, - validate=INDY_CRED_DEF_ID_VALIDATE, - metadata={ - "description": "Credential definition identifier", - "example": INDY_CRED_DEF_ID_EXAMPLE, - }, - ) - schema_id = fields.Str( - required=False, - validate=INDY_SCHEMA_ID_VALIDATE, - metadata={ - "description": "Schema identifier", - "example": INDY_SCHEMA_ID_EXAMPLE, - }, - ) - schema_issuer_did = fields.Str( - required=False, - validate=INDY_DID_VALIDATE, - metadata={"description": "Schema issuer DID", "example": INDY_DID_EXAMPLE}, - ) - schema_name = fields.Str( - required=False, - metadata={"description": "Schema name", "example": "preferences"}, - ) - schema_version = fields.Str( - required=False, - validate=INDY_VERSION_VALIDATE, - metadata={"description": "Schema version", "example": INDY_VERSION_EXAMPLE}, - ) - issuer_did = fields.Str( - required=False, - validate=INDY_DID_VALIDATE, - metadata={"description": "Credential issuer DID", "example": INDY_DID_EXAMPLE}, - ) - - class V20CredFilterSchema(OpenAPISchema): """Credential filtration criteria.""" @@ -273,11 +232,6 @@ class V20CredFilterSchema(OpenAPISchema): required=False, metadata={"description": "Credential filter for linked data proof"}, ) - ld_proof = fields.Nested( - V20CredFilterVCDISchema, - required=False, - metadata={"description": "Credential filter for linked data proof"}, - ) vc_di = fields.Nested( V20CredFilterVCDISchema, From f71f009134dbef69ab4c0b4a21b57624465a0b11 Mon Sep 17 00:00:00 2001 From: Orjiene Kenechukwu Date: Thu, 7 Mar 2024 16:49:05 +0100 Subject: [PATCH 66/69] passing the test Signed-off-by: Orjiene Kenechukwu --- .../protocols/issue_credential/v2_0/formats/vc_di/handler.py | 1 - 1 file changed, 1 deletion(-) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py index c2232636ae..2d9a3b5599 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py @@ -15,7 +15,6 @@ from marshmallow import RAISE from ......anoncreds.revocation import AnonCredsRevocation - from ......anoncreds.registry import AnonCredsRegistry from ......anoncreds.holder import AnonCredsHolder, AnonCredsHolderError from ......anoncreds.issuer import ( From e435f3ddb061e3d2522d0f00f8bdd9e0575784e5 Mon 
Sep 17 00:00:00 2001 From: Golda Velez Date: Thu, 7 Mar 2024 23:27:53 -0700 Subject: [PATCH 67/69] Revert "Refactoring by removing redundancy" --- .../credential_definitions/vcdi/util.py | 73 ------------------- aries_cloudagent/messaging/valid.py | 70 ------------------ .../v2_0/formats/vc_di/handler.py | 16 +++- .../issue_credential/v2_0/message_types.py | 6 +- .../protocols/issue_credential/v2_0/routes.py | 30 +++----- demo/runners/agent_container.py | 2 +- demo/runners/support/agent.py | 56 ++++++++++---- 7 files changed, 72 insertions(+), 181 deletions(-) delete mode 100644 aries_cloudagent/messaging/credential_definitions/vcdi/util.py diff --git a/aries_cloudagent/messaging/credential_definitions/vcdi/util.py b/aries_cloudagent/messaging/credential_definitions/vcdi/util.py deleted file mode 100644 index c703b7685d..0000000000 --- a/aries_cloudagent/messaging/credential_definitions/vcdi/util.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Credential definition utilities.""" - -import re - -from marshmallow import fields - -from ....core.profile import Profile -from ...models.openapi import OpenAPISchema -from ...valid import ( - VCDI_CRED_DEF_ID_EXAMPLE, - VCDI_CRED_DEF_ID_VALIDATE, - VCDI_DID_EXAMPLE, - VCDI_DID_VALIDATE, - VCDI_SCHEMA_ID_EXAMPLE, - VCDI_SCHEMA_ID_VALIDATE, - VCDI_VERSION_EXAMPLE, - VCDI_VERSION_VALIDATE, -) - -CRED_DEF_SENT_RECORD_TYPE = "cred_def_sent" - - -class VCDICredDefQueryStringSchema(OpenAPISchema): - """Query string parameters for credential definition searches.""" - - schema_id = fields.Str( - required=False, - validate=VCDI_SCHEMA_ID_VALIDATE, - metadata={ - "description": "Schema identifier", - "example": VCDI_SCHEMA_ID_EXAMPLE, - }, - ) - schema_issuer_did = fields.Str( - required=False, - validate=VCDI_DID_VALIDATE, - metadata={"description": "Schema issuer DID", "example": VCDI_DID_EXAMPLE}, - ) - schema_name = fields.Str( - required=False, metadata={"description": "Schema name", "example": "membership"} - ) - schema_version = fields.Str( - required=False, - validate=VCDI_VERSION_VALIDATE, - metadata={"description": "Schema version", "example": VCDI_VERSION_EXAMPLE}, - ) - issuer_did = fields.Str( - required=False, - validate=VCDI_DID_VALIDATE, - metadata={"description": "Issuer DID", "example": VCDI_DID_EXAMPLE}, - ) - cred_def_id = fields.Str( - required=False, - validate=VCDI_CRED_DEF_ID_VALIDATE, - metadata={ - "description": "Credential definition id", - "example": VCDI_CRED_DEF_ID_EXAMPLE, - }, - ) - - -CRED_DEF_TAGS = list(vars(VCDICredDefQueryStringSchema).get("_declared_fields", [])) - -CRED_DEF_EVENT_PREFIX = "acapy::CRED_DEF::" -EVENT_LISTENER_PATTERN = re.compile(f"^{CRED_DEF_EVENT_PREFIX}(.*)?$") - - -async def notify_cred_def_event(profile: Profile, cred_def_id: str, meta_data: dict): - """Send notification for a cred def post-process event.""" - await profile.notify( - CRED_DEF_EVENT_PREFIX + cred_def_id, - meta_data, - ) \ No newline at end of file diff --git a/aries_cloudagent/messaging/valid.py b/aries_cloudagent/messaging/valid.py index 0da9c8a1bb..c2266197ff 100644 --- a/aries_cloudagent/messaging/valid.py +++ b/aries_cloudagent/messaging/valid.py @@ -348,19 +348,6 @@ def __init__(self): error="Value {input} is not an indy decentralized identifier (DID)", ) -class VcdiDID(Regexp): - """Validate value against vcdi DID.""" - - EXAMPLE = "WgWxqztrNooG92RXvxSTWv" - PATTERN = re.compile(rf"^(did:key:)?[{B58}]{{21,22}}$") - - def __init__(self): - """Initialize the instance.""" - - super().__init__( - VcdiDID.PATTERN, - error="Value {input} is 
not an vcdi decentralized identifier (DID)", - ) class DIDValidation(Regexp): """Validate value against any valid DID spec.""" @@ -457,25 +444,6 @@ def __init__(self): error="Value {input} is not an indy credential definition identifier", ) -class VcdiCredDefId(Regexp): - """Validate value against vcdi credential definition identifier specification.""" - - EXAMPLE = "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag" - PATTERN = ( - rf"^([{B58}]{{21,22}})" # issuer DID - f":3" # cred def id marker - f":CL" # sig alg - rf":(([1-9][0-9]*)|([{B58}]{{21,22}}:2:.+:[0-9.]+))" # schema txn / id - f":(.+)?$" # tag - ) - - def __init__(self): - """Initialize the instance.""" - - super().__init__( - VcdiCredDefId.PATTERN, - error="Value {input} is not an vcdi credential definition identifier", - ) class IndyVersion(Regexp): """Validate value against indy version specification.""" @@ -491,19 +459,6 @@ def __init__(self): error="Value {input} is not an indy version (use only digits and '.')", ) -class VcdiVersion(Regexp): - """Validate value against vcdi version specification.""" - - EXAMPLE = "0.1" - PATTERN = r"^[0-9.]+$" - - def __init__(self): - """Initialize the instance.""" - - super().__init__( - VcdiVersion.PATTERN, - error="Value {input} is not an vcdi version (use only digits and '.')", - ) class IndySchemaId(Regexp): """Validate value against indy schema identifier specification.""" @@ -519,19 +474,6 @@ def __init__(self): error="Value {input} is not an indy schema identifier", ) -class VcdiSchemaId(Regexp): - """Validate value against vcdi schema identifier specification.""" - - EXAMPLE = "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0" - PATTERN = rf"^[{B58}]{{21,22}}:2:.+:[0-9.]+$" - - def __init__(self): - """Initialize the instance.""" - - super().__init__( - VcdiSchemaId.PATTERN, - error="Value {input} is not an vcdi schema identifier", - ) class IndyRevRegId(Regexp): """Validate value against indy revocation registry identifier specification.""" @@ -996,9 +938,6 @@ def __init__( INDY_DID_VALIDATE = IndyDID() INDY_DID_EXAMPLE = IndyDID.EXAMPLE -VCDI_DID_VALIDATE = VcdiDID() -VCDI_DID_EXAMPLE = VcdiDID.EXAMPLE - GENERIC_DID_VALIDATE = MaybeIndyDID() GENERIC_DID_EXAMPLE = MaybeIndyDID.EXAMPLE @@ -1008,15 +947,9 @@ def __init__( INDY_SCHEMA_ID_VALIDATE = IndySchemaId() INDY_SCHEMA_ID_EXAMPLE = IndySchemaId.EXAMPLE -VCDI_SCHEMA_ID_VALIDATE = VcdiSchemaId() -VCDI_SCHEMA_ID_EXAMPLE = VcdiSchemaId.EXAMPLE - INDY_CRED_DEF_ID_VALIDATE = IndyCredDefId() INDY_CRED_DEF_ID_EXAMPLE = IndyCredDefId.EXAMPLE -VCDI_CRED_DEF_ID_VALIDATE = VcdiCredDefId() -VCDI_CRED_DEF_ID_EXAMPLE = VcdiCredDefId.EXAMPLE - INDY_REV_REG_ID_VALIDATE = IndyRevRegId() INDY_REV_REG_ID_EXAMPLE = IndyRevRegId.EXAMPLE @@ -1026,9 +959,6 @@ def __init__( INDY_VERSION_VALIDATE = IndyVersion() INDY_VERSION_EXAMPLE = IndyVersion.EXAMPLE -VCDI_VERSION_VALIDATE = VcdiVersion() -VCDI_VERSION_EXAMPLE = VcdiVersion.EXAMPLE - INDY_PREDICATE_VALIDATE = IndyPredicate() INDY_PREDICATE_EXAMPLE = IndyPredicate.EXAMPLE diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py index 2d9a3b5599..50a8e40979 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py @@ -3,6 +3,10 @@ import json import logging from typing import Mapping, Tuple +from aries_cloudagent.protocols.issue_credential.v2_0.manager import ( + V20CredManager, + V20CredManagerError, +) from 
aries_cloudagent.protocols.issue_credential.v2_0.models.detail.vc_di import ( V20CredExRecordVCDI, ) @@ -15,17 +19,21 @@ from marshmallow import RAISE from ......anoncreds.revocation import AnonCredsRevocation + from ......anoncreds.registry import AnonCredsRegistry from ......anoncreds.holder import AnonCredsHolder, AnonCredsHolderError from ......anoncreds.issuer import ( AnonCredsIssuer, ) +from ......indy.models.cred import IndyCredentialSchema from ......indy.models.cred_abstract import ( + IndyCredAbstractSchema, VCDICredAbstract, VCDICredAbstractSchema, ) from ......indy.models.cred_request import ( BindingProof, + IndyCredRequestSchema, VCDICredRequest, VCDICredRequestSchema, ) @@ -35,8 +43,10 @@ GET_CRED_DEF, IndyLedgerRequestsExecutor, ) -from ......messaging.credential_definitions.util import CRED_DEF_SENT_RECORD_TYPE -from ......messaging.credential_definitions.vcdi.util import VCDICredDefQueryStringSchema +from ......messaging.credential_definitions.util import ( + CRED_DEF_SENT_RECORD_TYPE, + CredDefQueryStringSchema, +) from ......messaging.decorators.attach_decorator import AttachDecorator from ......multitenant.base import BaseMultitenantManager from ......revocation_anoncreds.models.issuer_cred_rev_record import IssuerCredRevRecord @@ -84,7 +94,7 @@ def validate_fields(cls, message_type: str, attachment_data: Mapping): """ mapping = { - CRED_20_PROPOSAL: VCDICredDefQueryStringSchema, + CRED_20_PROPOSAL: CredDefQueryStringSchema, CRED_20_OFFER: VCDICredAbstractSchema, CRED_20_REQUEST: VCDICredRequestSchema, CRED_20_ISSUE: VerifiableCredentialSchema, diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py b/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py index 8bfe0615ea..81cb20a244 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py @@ -22,7 +22,9 @@ MESSAGE_TYPES = DIDCommPrefix.qualify_all( { - CRED_20_PROPOSAL: (f"{PROTOCOL_PACKAGE}.messages.cred_proposal.V20CredProposal"), + CRED_20_PROPOSAL: ( + f"{PROTOCOL_PACKAGE}.messages.cred_proposal.V20CredProposal" + ), CRED_20_OFFER: f"{PROTOCOL_PACKAGE}.messages.cred_offer.V20CredOffer", CRED_20_REQUEST: f"{PROTOCOL_PACKAGE}.messages.cred_request.V20CredRequest", CRED_20_ISSUE: f"{PROTOCOL_PACKAGE}.messages.cred_issue.V20CredIssue", @@ -41,7 +43,7 @@ CRED_20_PROPOSAL: { V20CredFormat.Format.INDY.api: "hlindy/cred-filter@v2.0", V20CredFormat.Format.LD_PROOF.api: "aries/ld-proof-vc-detail@v1.0", - V20CredFormat.Format.VC_DI.api: "didcomm/w3c-di-vc-offer@v0.1", + V20CredFormat.Format.VC_DI.api: "hlindy/cred-filter@v2.0", }, CRED_20_OFFER: { V20CredFormat.Format.INDY.api: "hlindy/cred-abstract@v2.0", diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/routes.py b/aries_cloudagent/protocols/issue_credential/v2_0/routes.py index bf522b0f59..7188df85ce 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/routes.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/routes.py @@ -37,14 +37,6 @@ INDY_SCHEMA_ID_VALIDATE, INDY_VERSION_EXAMPLE, INDY_VERSION_VALIDATE, - VCDI_CRED_DEF_ID_EXAMPLE, - VCDI_CRED_DEF_ID_VALIDATE, - VCDI_DID_EXAMPLE, - VCDI_DID_VALIDATE, - VCDI_SCHEMA_ID_EXAMPLE, - VCDI_SCHEMA_ID_VALIDATE, - VCDI_VERSION_EXAMPLE, - VCDI_VERSION_VALIDATE, UUID4_EXAMPLE, UUID4_VALIDATE, ) @@ -180,29 +172,30 @@ class V20CredFilterIndySchema(OpenAPISchema): metadata={"description": "Credential issuer DID", "example": INDY_DID_EXAMPLE}, ) + class 
V20CredFilterVCDISchema(OpenAPISchema): """VCDI credential filtration criteria.""" cred_def_id = fields.Str( required=False, - validate=VCDI_CRED_DEF_ID_VALIDATE, + validate=INDY_CRED_DEF_ID_VALIDATE, metadata={ "description": "Credential definition identifier", - "example": VCDI_CRED_DEF_ID_EXAMPLE, + "example": INDY_CRED_DEF_ID_EXAMPLE, }, ) schema_id = fields.Str( required=False, - validate=VCDI_SCHEMA_ID_VALIDATE, + validate=INDY_SCHEMA_ID_VALIDATE, metadata={ "description": "Schema identifier", - "example": VCDI_SCHEMA_ID_EXAMPLE, + "example": INDY_SCHEMA_ID_EXAMPLE, }, ) schema_issuer_did = fields.Str( required=False, - validate=VCDI_DID_VALIDATE, - metadata={"description": "Schema issuer DID", "example": VCDI_DID_EXAMPLE}, + validate=INDY_DID_VALIDATE, + metadata={"description": "Schema issuer DID", "example": INDY_DID_EXAMPLE}, ) schema_name = fields.Str( required=False, @@ -210,15 +203,16 @@ class V20CredFilterVCDISchema(OpenAPISchema): ) schema_version = fields.Str( required=False, - validate=VCDI_VERSION_VALIDATE, - metadata={"description": "Schema version", "example": VCDI_VERSION_EXAMPLE}, + validate=INDY_VERSION_VALIDATE, + metadata={"description": "Schema version", "example": INDY_VERSION_EXAMPLE}, ) issuer_did = fields.Str( required=False, - validate=VCDI_DID_VALIDATE, - metadata={"description": "Credential issuer DID", "example": VCDI_DID_EXAMPLE}, + validate=INDY_DID_VALIDATE, + metadata={"description": "Credential issuer DID", "example": INDY_DID_EXAMPLE}, ) + class V20CredFilterSchema(OpenAPISchema): """Credential filtration criteria.""" diff --git a/demo/runners/agent_container.py b/demo/runners/agent_container.py index 86b6996df0..dbdb7a772e 100644 --- a/demo/runners/agent_container.py +++ b/demo/runners/agent_container.py @@ -697,7 +697,7 @@ def __init__( genesis_txns: str = None, genesis_txn_list: str = None, tails_server_base_url: str = None, - cred_type: str = CRED_FORMAT_INDY or CRED_FORMAT_VC_DI, + cred_type: str = CRED_FORMAT_INDY, show_timing: bool = False, multitenant: bool = False, mediation: bool = False, diff --git a/demo/runners/support/agent.py b/demo/runners/support/agent.py index e983b20fc3..d800341221 100644 --- a/demo/runners/support/agent.py +++ b/demo/runners/support/agent.py @@ -370,7 +370,9 @@ async def register_schema_and_creddef_indy( log_msg("Schema ID:", schema_id) # Create a cred def for the schema - cred_def_tag = tag if tag else (self.ident + "." + schema_name).replace(" ", "_") + cred_def_tag = ( + tag if tag else (self.ident + "." + schema_name).replace(" ", "_") + ) credential_definition_body = { "schema_id": schema_id, "support_revocation": support_revocation, @@ -400,7 +402,9 @@ async def register_schema_and_creddef_indy( credential_definition_response = await self.admin_GET( "/credential-definitions/created" ) - if 0 == len(credential_definition_response["credential_definition_ids"]): + if 0 == len( + credential_definition_response["credential_definition_ids"] + ): await asyncio.sleep(1.0) attempts = attempts - 1 credential_definition_id = credential_definition_response[ @@ -447,7 +451,9 @@ async def register_schema_and_creddef_anoncreds( log_msg("Schema ID:", schema_id) # Create a cred def for the schema - cred_def_tag = tag if tag else (self.ident + "." + schema_name).replace(" ", "_") + cred_def_tag = ( + tag if tag else (self.ident + "." 
+ schema_name).replace(" ", "_") + ) max_cred_num = revocation_registry_size if revocation_registry_size else 0 credential_definition_body = { "credential_definition": { @@ -483,7 +489,9 @@ async def register_schema_and_creddef_anoncreds( credential_definition_response = await self.admin_GET( "/anoncreds/credential-definitions" ) - if 0 == len(credential_definition_response["credential_definition_ids"]): + if 0 == len( + credential_definition_response["credential_definition_ids"] + ): await asyncio.sleep(1.0) attempts = attempts - 1 credential_definition_id = credential_definition_response[ @@ -698,7 +706,9 @@ async def register_did( nym_info = data else: log_msg("using ledger: " + ledger_url + "/register") - resp = await self.client_session.post(ledger_url + "/register", json=data) + resp = await self.client_session.post( + ledger_url + "/register", json=data + ) if resp.status != 200: raise Exception( f"Error registering DID {data}, response code {resp.status}" @@ -912,7 +922,9 @@ def _process(self, args, env, loop): def get_process_args(self): return list( - flatten(([PYTHON, "-m", "aries_cloudagent", "start"], self.get_agent_args())) + flatten( + ([PYTHON, "-m", "aries_cloudagent", "start"], self.get_agent_args()) + ) ) async def start_process(self, python_path: str = None, wait: bool = True): @@ -1128,7 +1140,9 @@ async def admin_GET( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) response = await self.admin_request( "GET", path, None, text, params, headers=headers ) @@ -1180,7 +1194,9 @@ async def admin_POST( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) response = await self.admin_request( "POST", path, data, text, params, headers=headers ) @@ -1201,7 +1217,9 @@ async def admin_PATCH( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) return await self.admin_request( "PATCH", path, data, text, params, headers=headers ) @@ -1216,7 +1234,9 @@ async def admin_PUT( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) return await self.admin_request( "PUT", path, data, text, params, headers=headers ) @@ -1236,7 +1256,9 @@ async def admin_DELETE( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) response = await self.admin_request( "DELETE", path, data, text, params, headers=headers ) @@ -1255,7 +1277,9 @@ async def admin_GET_FILE(self, path, params=None, headers=None) -> bytes: if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) params = {k: v for (k, v) in (params or {}).items() if v is not None} resp = await self.client_session.request( "GET", self.admin_url + path, params=params, headers=headers @@ -1271,7 +1295,9 @@ async def 
admin_PUT_FILE(self, files, url, params=None, headers=None) -> bytes: if self.multitenant: if not headers: headers = {} - headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] + headers["Authorization"] = ( + "Bearer " + self.managed_wallet_params["token"] + ) params = {k: v for (k, v) in (params or {}).items() if v is not None} resp = await self.client_session.request( "PUT", url, params=params, data=files, headers=headers @@ -1643,7 +1669,9 @@ async def handle_connections(self, message): # setup endorser meta-data on our connection log_msg("Setup endorser agent meta-data ...") await self.admin_POST( - "/transactions/" + self.endorser_connection_id + "/set-endorser-role", + "/transactions/" + + self.endorser_connection_id + + "/set-endorser-role", params={"transaction_my_job": "TRANSACTION_ENDORSER"}, ) From 84790a0f9be49aa7a14dde53a708640a6bf81fca Mon Sep 17 00:00:00 2001 From: Sarthak Vijayvergiya <18403281+sarthakvijayvergiya@users.noreply.github.com> Date: Fri, 8 Mar 2024 07:29:13 +0100 Subject: [PATCH 68/69] fix liniting & format issues Signed-off-by: Sarthak Vijayvergiya <18403281+sarthakvijayvergiya@users.noreply.github.com> Signed-off-by: Golda Velez --- .../issue_credential/v2_0/formats/vc_di/handler.py | 7 ------- demo/features/environment.py | 1 - demo/features/steps/0453-issue-credential.py | 7 ------- demo/features/steps/0454-present-proof.py | 13 +------------ demo/features/steps/revocation-api.py | 11 +++++------ demo/features/steps/taa-txn-author-agreement.py | 4 ---- demo/playground/examples/tests/__init__.py | 1 - .../examples/tests/test_mediator_ping_agents.py | 6 +++--- demo/playground/examples/tests/test_ping_agents.py | 8 ++++---- demo/runners/acme.py | 3 --- demo/runners/support/agent.py | 6 +++--- demo/runners/support/utils.py | 2 +- 12 files changed, 17 insertions(+), 52 deletions(-) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py index 50a8e40979..5ef7c2c1ca 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py @@ -3,10 +3,6 @@ import json import logging from typing import Mapping, Tuple -from aries_cloudagent.protocols.issue_credential.v2_0.manager import ( - V20CredManager, - V20CredManagerError, -) from aries_cloudagent.protocols.issue_credential.v2_0.models.detail.vc_di import ( V20CredExRecordVCDI, ) @@ -25,15 +21,12 @@ from ......anoncreds.issuer import ( AnonCredsIssuer, ) -from ......indy.models.cred import IndyCredentialSchema from ......indy.models.cred_abstract import ( - IndyCredAbstractSchema, VCDICredAbstract, VCDICredAbstractSchema, ) from ......indy.models.cred_request import ( BindingProof, - IndyCredRequestSchema, VCDICredRequest, VCDICredRequestSchema, ) diff --git a/demo/features/environment.py b/demo/features/environment.py index abfe5be201..9ca2e341c3 100644 --- a/demo/features/environment.py +++ b/demo/features/environment.py @@ -1,4 +1,3 @@ -from runners.agent_container import AgentContainer from bdd_support.agent_backchannel_client import ( aries_container_terminate, ) diff --git a/demo/features/steps/0453-issue-credential.py b/demo/features/steps/0453-issue-credential.py index 5ccee12d5b..8046ca565f 100644 --- a/demo/features/steps/0453-issue-credential.py +++ b/demo/features/steps/0453-issue-credential.py @@ -1,7 +1,5 @@ from behave import given, when, then import json -from time import sleep 
-import time from bdd_support.agent_backchannel_client import ( aries_container_create_schema_cred_def, @@ -15,13 +13,8 @@ agent_container_POST, async_sleep, ) -from runners.agent_container import AgentContainer from runners.support.agent import ( - CRED_FORMAT_INDY, - CRED_FORMAT_JSON_LD, - DID_METHOD_SOV, DID_METHOD_KEY, - KEY_TYPE_ED255, KEY_TYPE_BLS, SIG_TYPE_BLS, ) diff --git a/demo/features/steps/0454-present-proof.py b/demo/features/steps/0454-present-proof.py index 030dd42f44..b7bb35bdc9 100644 --- a/demo/features/steps/0454-present-proof.py +++ b/demo/features/steps/0454-present-proof.py @@ -1,24 +1,13 @@ -from behave import given, when, then -import json -from time import sleep -import time +from behave import when, then from bdd_support.agent_backchannel_client import ( read_proof_req_data, - read_presentation_data, aries_container_request_proof, aries_container_verify_proof, agent_container_POST, async_sleep, ) -from runners.agent_container import AgentContainer from runners.support.agent import ( - CRED_FORMAT_INDY, - CRED_FORMAT_JSON_LD, - DID_METHOD_SOV, - DID_METHOD_KEY, - KEY_TYPE_ED255, - KEY_TYPE_BLS, SIG_TYPE_BLS, ) diff --git a/demo/features/steps/revocation-api.py b/demo/features/steps/revocation-api.py index d7cfc271a8..419ac0f411 100644 --- a/demo/features/steps/revocation-api.py +++ b/demo/features/steps/revocation-api.py @@ -1,4 +1,4 @@ -from behave import given, when, then +from behave import given, then import json import os @@ -7,7 +7,6 @@ agent_container_POST, async_sleep, ) -from runners.agent_container import AgentContainer BDD_EXTRA_AGENT_ARGS = os.getenv("BDD_EXTRA_AGENT_ARGS") @@ -30,18 +29,18 @@ def step_impl(context, issuer, count=None): agent = context.active_agents[issuer] async_sleep(5.0) created_response = agent_container_GET( - agent["agent"], f"/revocation/registries/created" + agent["agent"], "/revocation/registries/created" ) full_response = agent_container_GET( - agent["agent"], f"/revocation/registries/created", params={"state": "full"} + agent["agent"], "/revocation/registries/created", params={"state": "full"} ) decommissioned_response = agent_container_GET( agent["agent"], - f"/revocation/registries/created", + "/revocation/registries/created", params={"state": "decommissioned"}, ) finished_response = agent_container_GET( - agent["agent"], f"/revocation/registries/created", params={"state": "finished"} + agent["agent"], "/revocation/registries/created", params={"state": "finished"} ) async_sleep(4.0) if count: diff --git a/demo/features/steps/taa-txn-author-agreement.py b/demo/features/steps/taa-txn-author-agreement.py index 6dc43cc22f..ee15b250e1 100644 --- a/demo/features/steps/taa-txn-author-agreement.py +++ b/demo/features/steps/taa-txn-author-agreement.py @@ -1,13 +1,9 @@ from behave import given, when, then -import json -from time import sleep -import time from bdd_support.agent_backchannel_client import ( agent_container_GET, agent_container_POST, agent_container_PUT, - async_sleep, ) diff --git a/demo/playground/examples/tests/__init__.py b/demo/playground/examples/tests/__init__.py index c35b313157..8ac051d11c 100644 --- a/demo/playground/examples/tests/__init__.py +++ b/demo/playground/examples/tests/__init__.py @@ -3,7 +3,6 @@ from functools import wraps import logging import os -import time import pytest import requests diff --git a/demo/playground/examples/tests/test_mediator_ping_agents.py b/demo/playground/examples/tests/test_mediator_ping_agents.py index 9f37e58e89..2b5f408d0b 100644 --- 
+++ b/demo/playground/examples/tests/test_mediator_ping_agents.py
@@ -99,7 +99,7 @@ def initialize_mediation(agent: Agent, invitation):
 
 @pytest.fixture(scope="session")
 def faber_mediator(faber, mediation_invite):
-    logger.info(f"faber_mediator...")
+    logger.info("faber_mediator...")
     result = initialize_mediation(faber, mediation_invite)
     logger.info(f"...faber_mediator = {result}")
     yield result
@@ -107,7 +107,7 @@ def faber_mediator(faber, mediation_invite):
 
 @pytest.fixture(scope="session")
 def alice_mediator(alice, mediation_invite):
-    logger.info(f"alice_mediator...")
+    logger.info("alice_mediator...")
     result = initialize_mediation(alice, mediation_invite)
     logger.info(f"...alice_mediator = {result}")
     yield result
@@ -115,7 +115,7 @@ def alice_mediator(alice, mediation_invite):
 
 @pytest.fixture(scope="session")
 def multi_one_mediator(multi_one, mediation_invite):
-    logger.info(f"multi_one_mediator...")
+    logger.info("multi_one_mediator...")
     result = initialize_mediation(multi_one, mediation_invite)
     logger.info(f"...multi_one_mediator = {result}")
     yield result
diff --git a/demo/playground/examples/tests/test_ping_agents.py b/demo/playground/examples/tests/test_ping_agents.py
index b7c05b7461..86e7b65188 100644
--- a/demo/playground/examples/tests/test_ping_agents.py
+++ b/demo/playground/examples/tests/test_ping_agents.py
@@ -40,7 +40,7 @@ def alice_faber_connection(faber, alice):
     logger.info("faber create invitation to alice")
     invite = faber.create_invitation(auto_accept="true")["invitation"]
     logger.info(f"invitation = {invite}")
-    logger.info(f"alice receive invitation")
+    logger.info("alice receive invitation")
     resp = alice.receive_invite(invite, auto_accept="true")
     result = resp["connection_id"]
     logger.info(f"alice/faber connection_id = {result}")
@@ -53,7 +53,7 @@ def faber_alice_connection(faber, alice):
     logger.info("alice create invitation to faber")
     invite = alice.create_invitation(auto_accept="true")["invitation"]
     logger.info(f"invitation = {invite}")
-    logger.info(f"faber receive invitation")
+    logger.info("faber receive invitation")
     resp = faber.receive_invite(invite, auto_accept="true")
     result = resp["connection_id"]
     logger.info(f"faber/alice connection_id = {result}")
@@ -66,7 +66,7 @@ def alice_multi_one_connection(multi_one, alice):
     logger.info("multi_one create invitation to alice")
     invite = multi_one.create_invitation(auto_accept="true")["invitation"]
     logger.info(f"invitation = {invite}")
-    logger.info(f"alice receive invitation")
+    logger.info("alice receive invitation")
     resp = alice.receive_invite(invite, auto_accept="true")
     result = resp["connection_id"]
     logger.info(f"alice/multi_one connection_id = {result}")
@@ -79,7 +79,7 @@ def multi_one_alice_connection(multi_one, alice):
     logger.info("alice create invitation to multi_one")
     invite = alice.create_invitation(auto_accept="true")["invitation"]
     logger.info(f"invitation = {invite}")
-    logger.info(f"faber receive invitation")
+    logger.info("faber receive invitation")
     resp = multi_one.receive_invite(invite, auto_accept="true")
     result = resp["connection_id"]
     logger.info(f"multi_one/alice connection_id = {result}")
diff --git a/demo/runners/acme.py b/demo/runners/acme.py
index 898ff72846..a420321342 100644
--- a/demo/runners/acme.py
+++ b/demo/runners/acme.py
@@ -1,9 +1,7 @@
 import asyncio
-import json
 import logging
 import os
 import sys
-from aiohttp import ClientError
 
 sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) # noqa
 
@@ -16,7 +14,6 @@
     check_requires,
     log_msg,
     log_status,
-    log_timer,
     prompt,
     prompt_loop,
 )
diff --git a/demo/runners/support/agent.py b/demo/runners/support/agent.py
index d800341221..2e937af9d9 100644
--- a/demo/runners/support/agent.py
+++ b/demo/runners/support/agent.py
@@ -1061,17 +1061,17 @@ async def handle_problem_report(self, message):
         )
 
     async def handle_endorse_transaction(self, message):
-        self.log(f"Received endorse transaction ...\n", source="stderr")
+        self.log("Received endorse transaction ...\n", source="stderr")
 
     async def handle_revocation_registry(self, message):
         reg_id = message.get("revoc_reg_id", "(undetermined)")
         self.log(f"Revocation registry: {reg_id} state: {message['state']}")
 
     async def handle_mediation(self, message):
-        self.log(f"Received mediation message ...\n")
+        self.log("Received mediation message ...\n")
 
     async def handle_keylist(self, message):
-        self.log(f"Received handle_keylist message ...\n")
+        self.log("Received handle_keylist message ...\n")
         self.log(json.dumps(message))
 
     async def taa_accept(self):
diff --git a/demo/runners/support/utils.py b/demo/runners/support/utils.py
index 77e5d7792f..7a7abf96dc 100644
--- a/demo/runners/support/utils.py
+++ b/demo/runners/support/utils.py
@@ -115,7 +115,7 @@ def output_reader(handle, callback, *args, **kwargs):
             break
         try:
             run_in_terminal(functools.partial(callback, line, *args))
-        except AssertionError as e:
+        except AssertionError:
             # see comment in DemoAgent.handle_output
             # trace log and prompt_toolkit do not get along...
             pass

From 3089b190b3e3f2f4a31cb0bb10acb6f8bcbe9af8 Mon Sep 17 00:00:00 2001
From: Sarthak Vijayvergiya <18403281+sarthakvijayvergiya@users.noreply.github.com>
Date: Fri, 8 Mar 2024 08:53:14 +0100
Subject: [PATCH 69/69] fix test case added vc_di

Signed-off-by: Sarthak Vijayvergiya <18403281+sarthakvijayvergiya@users.noreply.github.com>
---
 .../protocols/issue_credential/v2_0/tests/test_routes.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_routes.py b/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_routes.py
index 0be8b63429..c611fcca7d 100644
--- a/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_routes.py
+++ b/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_routes.py
@@ -105,6 +105,7 @@ async def test_credential_exchange_list(self):
                         "cred_ex_record": mock_cx_rec.serialize.return_value,
                         "indy": None,
                         "ld_proof": None,
+                        "vc_di": None,
                     }
                 ]
             }
@@ -1489,6 +1490,7 @@ async def test_credential_exchange_store_bad_cred_id_json(self):
                 "cred_ex_record": mock_cx_rec.serialize.return_value,
                 "indy": {"...": "..."},
                 "ld_proof": None,
+                "vc_di": None,
             }
         )
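
The two hunks above extend the serialized output that the v2.0 issue-credential route tests expect, adding a "vc_di" detail entry alongside the existing "indy" and "ld_proof" entries. The following is a minimal sketch of that per-exchange result shape; it is inferred from the test expectations above (the concrete values are illustrative, not taken from the admin API reference):

    # One entry per supported credential format; unused formats stay None.
    result = {
        "cred_ex_record": {"cred_ex_id": "example-id", "state": "done"},
        "indy": None,      # populated when the exchange used the indy format
        "ld_proof": None,  # populated when the exchange used the ld_proof format
        "vc_di": None,     # vc_di format slot exercised by this patch
    }
    assert set(result) == {"cred_ex_record", "indy", "ld_proof", "vc_di"}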