"""Data integrity cryptosuite registry.

Recovered file: aries_cloudagent/vc/data_integrity/cryptosuites/__init__.py
(The same patch also registers the "aries_cloudagent.vc.data_integrity"
plugin in config/default_context.py.)
"""

from .eddsa_jcs_2022 import EddsaJcs2022

# Maps a proof's `cryptosuite` identifier to the implementing suite class.
# Consumed by the DataIntegrityManager when selecting a suite.
CRYPTOSUITES = {
    "eddsa-jcs-2022": EddsaJcs2022,
}

__all__ = [
    # CRYPTOSUITES is part of the package's public surface (the manager
    # dispatches through it), so export it explicitly.
    "CRYPTOSUITES",
    "EddsaJcs2022",
]
"""EddsaJcs2022 cryptosuite."""

from datetime import datetime
from hashlib import sha256

import canonicaljson

from ....core.error import BaseError
from ....core.profile import ProfileSession
from ....utils.multiformats import multibase
from ....wallet.base import BaseWallet
from ....wallet.keys.manager import (
    MultikeyManager,
    key_type_from_multikey,
    multikey_to_verkey,
)
from ..errors import PROBLEM_DETAILS
from ..models.options import DataIntegrityProofOptions
from ..models.proof import DataIntegrityProof
from ..models.verification_response import (
    DataIntegrityVerificationResult,
    ProblemDetails,
)


class CryptosuiteError(BaseError):
    """Generic Cryptosuite Error."""


class EddsaJcs2022:
    """EddsaJcs2022 cryptosuite.

    https://www.w3.org/TR/vc-di-eddsa/#eddsa-jcs-2022.
    """

    def __init__(self, *, session: ProfileSession):
        """Create new EddsaJcs2022 Cryptosuite instance.

        Args:
            session: ProfileSession to use during crypto operations.

        """
        super().__init__()
        self.session = session
        self.wallet = session.inject(BaseWallet)
        self.key_manager = MultikeyManager(session)

    async def create_proof(
        self, unsecured_data_document: dict, options: DataIntegrityProofOptions
    ):
        """Create proof algorithm.

        https://www.w3.org/TR/vc-di-eddsa/#create-proof-eddsa-jcs-2022.
        """
        proof = DataIntegrityProof.deserialize(options.serialize().copy())

        # Spec says to copy document context to the proof but it's unnecessary IMO,
        # commenting out for the time being...

        # if '@context' in unsecured_data_document:
        #     proof['@context'] = unsecured_data_document['@context']

        proof_config = self.proof_configuration(proof)
        transformed_data = self.transformation(unsecured_data_document, options)
        hash_data = self.hashing(transformed_data, proof_config)
        proof_bytes = await self.proof_serialization(hash_data, options)

        proof.proof_value = multibase.encode(proof_bytes, "base58btc")

        return proof

    def proof_configuration(self, options: DataIntegrityProofOptions):
        """Proof configuration algorithm.

        https://www.w3.org/TR/vc-di-eddsa/#proof-configuration-eddsa-jcs-2022.
        """
        proof_config = options
        # Fixed: assertion messages were truncated (missing closing quote).
        assert (
            proof_config.type == "DataIntegrityProof"
        ), 'Expected proof.type to be "DataIntegrityProof"'
        assert (
            proof_config.cryptosuite == "eddsa-jcs-2022"
        ), 'Expected proof.cryptosuite to be "eddsa-jcs-2022"'

        if proof_config.created:
            assert datetime.fromisoformat(proof_config.created)

        if proof_config.expires:
            assert datetime.fromisoformat(proof_config.expires)

        return self._canonicalize(proof_config.serialize())

    def transformation(
        self, unsecured_document: dict, options: DataIntegrityProofOptions
    ):
        """Transformation algorithm.

        https://www.w3.org/TR/vc-di-eddsa/#transformation-eddsa-jcs-2022.
        """
        assert (
            options.type == "DataIntegrityProof"
        ), "Expected proof.type to be `DataIntegrityProof`"
        assert (
            options.cryptosuite == "eddsa-jcs-2022"
        ), "Expected proof.cryptosuite to be `eddsa-jcs-2022`"

        return self._canonicalize(unsecured_document)

    def hashing(self, transformed_document: bytes, canonical_proof_config: bytes):
        """Hashing algorithm.

        https://www.w3.org/TR/vc-di-eddsa/#hashing-eddsa-jcs-2022.
        """
        # Per spec: hash(proof config) || hash(document), in that order.
        return (
            sha256(canonical_proof_config).digest()
            + sha256(transformed_document).digest()
        )

    async def proof_serialization(
        self, hash_data: bytes, options: DataIntegrityProofOptions
    ):
        """Proof Serialization Algorithm.

        https://www.w3.org/TR/vc-di-eddsa/#proof-serialization-eddsa-jcs-2022.
        """
        # TODO encapsulate in a key manager method
        if options.verification_method.startswith("did:key:"):
            # did:key verification methods embed the multikey in the fragment.
            multikey = options.verification_method.split("#")[-1]
            key_info = await self.key_manager.from_multikey(multikey)

        else:
            key_info = await self.key_manager.from_kid(options.verification_method)

        return await self.wallet.sign_message(
            message=hash_data,
            from_verkey=multikey_to_verkey(key_info["multikey"]),
        )

    def _canonicalize(self, data: dict):
        """Json canonicalization."""
        return canonicaljson.encode_canonical_json(data)

    async def verify_proof(self, secured_document: dict):
        """Verify proof algorithm.

        https://www.w3.org/TR/vc-di-eddsa/#verify-proof-eddsa-jcs-2022.
        """
        unsecured_document = secured_document.copy()
        proof = unsecured_document.pop("proof")
        proof_options = proof.copy()
        proof_bytes = multibase.decode(proof_options.pop("proofValue"))

        try:
            # Currently leaving context processing out of scope,
            # leaving code commented as it's technically an algorithm step.
            # Due to the cryptosuite being based on JSON canonicalization,
            # the integrity of the document is protected without RDF processing.

            # https://www.w3.org/TR/vc-data-integrity/#validating-contexts

            # assert secured_document['@context'] == proof_options['@context']
            # unsecured_document['@context'] = proof_options['@context']

            proof_options = DataIntegrityProofOptions.deserialize(proof_options)
            transformed_data = self.transformation(unsecured_document, proof_options)
            proof_config = self.proof_configuration(proof_options)
            hash_data = self.hashing(transformed_data, proof_config)
            verified = await self.proof_verification(
                hash_data, proof_bytes, proof_options
            )
            if not verified:
                raise CryptosuiteError("Invalid signature.")

        # AssertionError is caught as well so that a malformed proof
        # configuration yields a failed verification result instead of
        # crashing the caller (the configuration checks above use asserts).
        except (CryptosuiteError, AssertionError) as err:
            problem_detail = ProblemDetails.deserialize(
                PROBLEM_DETAILS["PROOF_VERIFICATION_ERROR"]
            )
            problem_detail.detail = str(err)
            return DataIntegrityVerificationResult(
                verified=False,
                proof=DataIntegrityProof.deserialize(proof),
                problem_details=[problem_detail],
            )

        return DataIntegrityVerificationResult(
            verified=True,
            proof=DataIntegrityProof.deserialize(proof),
            problem_details=[],
        )

    async def proof_verification(
        self, hash_data: bytes, proof_bytes: bytes, options: DataIntegrityProofOptions
    ):
        """Proof verification algorithm.

        https://www.w3.org/TR/vc-di-eddsa/#proof-verification-eddsa-jcs-2022.
        """
        multikey = await MultikeyManager(
            self.session
        ).resolve_multikey_from_verification_method(options.verification_method)
        verkey = multikey_to_verkey(multikey)
        key_type = key_type_from_multikey(multikey)
        return await self.wallet.verify_message(
            message=hash_data,
            signature=proof_bytes,
            from_verkey=verkey,
            key_type=key_type,
        )


# --- recovered file: aries_cloudagent/vc/data_integrity/errors.py ---

"""Problem Details objects for error handling."""

# https://www.w3.org/TR/vc-data-integrity/#processing-errors
PROBLEM_DETAILS = {
    "PROOF_GENERATION_ERROR": {
        "type": "https://w3id.org/security#PROOF_GENERATION_ERROR"
    },
    "PROOF_VERIFICATION_ERROR": {
        "type": "https://w3id.org/security#PROOF_VERIFICATION_ERROR"
    },
    "PROOF_TRANSFORMATION_ERROR": {
        "type": "https://w3id.org/security#PROOF_TRANSFORMATION_ERROR"
    },
    "INVALID_DOMAIN_ERROR": {"type": "https://w3id.org/security#INVALID_DOMAIN_ERROR"},
    "INVALID_CHALLENGE_ERROR": {
        "type": "https://w3id.org/security#INVALID_CHALLENGE_ERROR"
    },
}
"""DataIntegrity class."""

from datetime import datetime

from ...core.error import BaseError
from ...core.profile import ProfileSession
from ...resolver.base import DIDNotFound

# Import the shared registry instead of duplicating the mapping here, so a
# suite added to cryptosuites/__init__.py is automatically available.
from .cryptosuites import CRYPTOSUITES
from .errors import PROBLEM_DETAILS
from .models.options import DataIntegrityProofOptions
from .models.proof import DataIntegrityProof
from .models.verification_response import (
    DataIntegrityVerificationResponse,
    DataIntegrityVerificationResult,
    ProblemDetails,
)

PROOF_TYPES = ["DataIntegrityProof"]

PROOF_PURPOSES = [
    "authentication",
    "assertionMethod",
]


class DataIntegrityManagerError(BaseError):
    """Generic DataIntegrityManager Error."""


class DataIntegrityManager:
    """Class for managing data integrity proofs."""

    def __init__(self, session: ProfileSession):
        """Initialize the DataIntegrityManager."""
        self.session = session

    async def add_proof(self, document: dict, options: DataIntegrityProofOptions):
        """Data integrity add proof algorithm.

        https://www.w3.org/TR/vc-data-integrity/#add-proof.
        """
        self.validate_proof_options(options)
        suite = self.select_suite(options)

        # Capture existing proofs if any
        all_proofs = document.pop("proof", [])
        if not isinstance(all_proofs, (list, dict)):
            raise DataIntegrityManagerError("Expected proof to be a list or an object.")

        all_proofs = [all_proofs] if isinstance(all_proofs, dict) else all_proofs

        # Create secured document and create new proof
        secured_document = document.copy()
        secured_document["proof"] = all_proofs
        proof = await suite.create_proof(document, options)
        secured_document["proof"].append(proof.serialize())
        return secured_document

    async def verify_proof(self, secured_document: dict):
        """Verify a proof attached to a secured document.

        https://www.w3.org/TR/vc-data-integrity/#verify-proof.
        """
        unsecured_document = secured_document.copy()
        all_proofs = unsecured_document.pop("proof")
        all_proofs = all_proofs if isinstance(all_proofs, list) else [all_proofs]
        verification_results = []
        for proof in all_proofs:
            proof_options = proof.copy()
            proof_options.pop("proofValue")
            proof_options = DataIntegrityProofOptions.deserialize(proof_options)
            try:
                self.validate_proof_options(proof_options)
                suite = self.select_suite(proof_options)
                input_document = unsecured_document.copy()
                input_document["proof"] = proof
                verification_result = await suite.verify_proof(input_document)

            except (DataIntegrityManagerError, DIDNotFound) as err:
                problem_detail = ProblemDetails.deserialize(
                    PROBLEM_DETAILS["PROOF_VERIFICATION_ERROR"]
                )
                problem_detail.detail = str(err)
                verification_result = DataIntegrityVerificationResult(
                    verified=False,
                    proof=DataIntegrityProof.deserialize(proof),
                    problem_details=[problem_detail],
                )
            verification_results.append(verification_result)
        return DataIntegrityVerificationResponse(
            # all() already returns a bool; the `True if ... else False`
            # wrapper was redundant.
            verified=all(result.verified for result in verification_results),
            verified_document=unsecured_document,
            results=verification_results,
        )

    def select_suite(self, options: DataIntegrityProofOptions):
        """Instantiate a cryptographic suite.

        https://www.w3.org/TR/vc-data-integrity/#cryptographic-suites.
        """
        if options.type == "DataIntegrityProof":
            # validate_proof_options has already checked cryptosuite membership.
            return CRYPTOSUITES[options.cryptosuite](session=self.session)
        # Fixed: the previous `elif ...: pass` branch fell through and
        # returned an unbound local. Raise explicitly until legacy suites
        # (e.g. Ed25519Signature2020) are supported.
        # TODO add support for Ed25519Signature2020
        raise DataIntegrityManagerError(f"Unsupported proof type {options.type}")

    def validate_proof_options(self, proof_options: DataIntegrityProofOptions):
        """Generic proof assertions for a data integrity proof options."""
        if proof_options.created:
            try:
                datetime.fromisoformat(proof_options.created)
            except ValueError:
                raise DataIntegrityManagerError(
                    f"Invalid proof creation datetime format {proof_options.created}"
                )
        if proof_options.expires:
            try:
                datetime.fromisoformat(proof_options.expires)
            except ValueError:
                raise DataIntegrityManagerError(
                    f"Invalid proof expiration datetime format {proof_options.expires}"
                )
        if proof_options.type not in PROOF_TYPES:
            raise DataIntegrityManagerError(
                f"Unsupported proof type {proof_options.type}"
            )
        if proof_options.type == "DataIntegrityProof":
            if not proof_options.cryptosuite:
                raise DataIntegrityManagerError(
                    "DataIntegrityProof must specify a cryptosuite."
                )
            if proof_options.cryptosuite not in CRYPTOSUITES:
                raise DataIntegrityManagerError(
                    f"Unsupported cryptosuite {proof_options.cryptosuite}"
                )
        if proof_options.proof_purpose not in PROOF_PURPOSES:
            raise DataIntegrityManagerError(
                f"Unsupported proof purpose {proof_options.proof_purpose}"
            )


# --- recovered file: aries_cloudagent/vc/data_integrity/models/__init__.py ---

from .proof import DataIntegrityProof, DataIntegrityProofSchema
from .options import DataIntegrityProofOptions, DataIntegrityProofOptionsSchema
from .verification_response import (
    DataIntegrityVerificationResponseSchema,
    DataIntegrityVerificationResponse,
)

__all__ = [
    "DataIntegrityProof",
    "DataIntegrityProofSchema",
    "DataIntegrityProofOptions",
    "DataIntegrityProofOptionsSchema",
    "DataIntegrityVerificationResponse",
    "DataIntegrityVerificationResponseSchema",
]
"""DataIntegrityProof."""

# Recovered file: aries_cloudagent/vc/data_integrity/models/options.py
# Model + marshmallow schema for the *options* passed when creating or
# verifying a data integrity proof (a proof without its proofValue).

from typing import Optional

from marshmallow import INCLUDE, fields, post_dump

from ....messaging.models.base import BaseModel, BaseModelSchema
from ....messaging.valid import (
    RFC3339_DATETIME_EXAMPLE,
    UUID4_EXAMPLE,
    Uri,
)


class DataIntegrityProofOptions(BaseModel):
    """Data Integrity Proof Options model."""

    class Meta:
        """DataIntegrityProofOptions metadata."""

        schema_class = "DataIntegrityProofOptionsSchema"

    def __init__(
        self,
        id: Optional[str] = None,
        type: Optional[str] = None,
        proof_purpose: Optional[str] = None,
        verification_method: Optional[str] = None,
        cryptosuite: Optional[str] = None,
        created: Optional[str] = None,
        expires: Optional[str] = None,
        domain: Optional[str] = None,
        challenge: Optional[str] = None,
        previous_proof: Optional[str] = None,
        proof_value: Optional[str] = None,
        nonce: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Initialize the DataIntegrityProofOptions instance."""

        self.id = id
        self.type = type
        self.proof_purpose = proof_purpose
        self.verification_method = verification_method
        self.cryptosuite = cryptosuite
        self.created = created
        self.expires = expires
        self.domain = domain
        self.challenge = challenge
        self.previous_proof = previous_proof
        self.proof_value = proof_value
        self.nonce = nonce
        # Unknown keyword args are retained and re-emitted on serialization
        # (see add_unknown_properties below).
        self.extra = kwargs


class DataIntegrityProofOptionsSchema(BaseModelSchema):
    """Data Integrity Proof Options schema.

    Based on https://www.w3.org/TR/vc-data-integrity/#proofs

    """

    class Meta:
        """Accept parameter overload."""

        unknown = INCLUDE
        model_class = DataIntegrityProofOptions

    id = fields.Str(
        required=False,
        metadata={
            "description": (
                "An optional identifier for the proof, which MUST be a URL [URL], \
                such as a UUID as a URN"
            ),
            "example": "urn:uuid:6a1676b8-b51f-11ed-937b-d76685a20ff5",
        },
    )

    type = fields.Str(
        required=True,
        metadata={
            "description": (
                "The specific type of proof MUST be specified as a string that maps \
                to a URL [URL]."
            ),
            "example": "DataIntegrityProof",
        },
    )

    proof_purpose = fields.Str(
        data_key="proofPurpose",
        required=True,
        metadata={
            "description": "The proof purpose acts as a safeguard to prevent the \
                proof from being misused by being applied to a purpose other than \
                the one that was intended.",
            "example": "assertionMethod",
        },
    )

    verification_method = fields.Str(
        data_key="verificationMethod",
        required=True,
        validate=Uri(),
        metadata={
            "description": "A verification method is the means and information \
                needed to verify the proof. ",
            "example": (
                "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL#z6Mkgg34"
                "2Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL"
            ),
        },
    )

    cryptosuite = fields.Str(
        required=True,
        metadata={
            "description": (
                "An identifier for the cryptographic suite that can be used to \
                verify the proof."
            ),
            "example": "eddsa-jcs-2022",
        },
    )

    created = fields.Str(
        required=False,
        metadata={
            "description": (
                "The date and time the proof was created is OPTIONAL and, if \
                included, MUST be specified as an [XMLSCHEMA11-2] \
                dateTimeStamp string"
            ),
            "example": RFC3339_DATETIME_EXAMPLE,
        },
    )

    expires = fields.Str(
        required=False,
        metadata={
            "description": (
                "The expires property is OPTIONAL and, if present, specifies when \
                the proof expires. If present, it MUST be an [XMLSCHEMA11-2] \
                dateTimeStamp string"
            ),
            "example": RFC3339_DATETIME_EXAMPLE,
        },
    )

    domain = fields.Str(
        required=False,
        metadata={
            "description": (
                "It conveys one or more security domains in which the proof is \
                meant to be used."
            ),
            "example": "example.com",
        },
    )

    challenge = fields.Str(
        required=False,
        metadata={
            "description": (
                "The value is used once for a particular domain and window of time. \
                This value is used to mitigate replay attacks."
            ),
            "example": UUID4_EXAMPLE,
        },
    )

    previous_proof = fields.Str(
        required=False,
        data_key="previousProof",
        metadata={
            "description": "Each value identifies another data integrity proof that \
                MUST verify before the current proof is processed.",
            "example": ("urn:uuid:6a1676b8-b51f-11ed-937b-d76685a20ff5"),
        },
    )

    proof_value = fields.Str(
        required=False,
        data_key="proofValue",
        metadata={
            "description": "The value of the proof signature.",
            "example": (
                "zsy1AahqbzJQ63n9RtekmwzqZeVj494VppdAVJBnMYrTwft6cLJJGeTSSxCCJ6HKnR"
                "twE7jjDh6sB2z2AAiZY9BBnCD8wUVgwqH3qchGRCuC2RugA4eQ9fUrR4Yuycac3caiaaay"
            ),
        },
    )

    nonce = fields.Str(
        required=False,
        metadata={
            "description": "One use of this field is to increase privacy by decreasing \
                linkability that is the result of deterministically \
                generated signatures.",
            "example": (
                "CF69iO3nfvqRsRBNElE8b4wO39SyJHPM7Gg1nExltW5vSfQA1lvDCR/zXX1To0/4NLo=="
            ),
        },
    )

    @post_dump(pass_original=True)
    def add_unknown_properties(self, data: dict, original, **kwargs):
        """Add back unknown properties before outputting."""

        data.update(original.extra)

        return data
"""DataIntegrityProof."""

# Recovered file: aries_cloudagent/vc/data_integrity/models/proof.py
# Model + marshmallow schema for a complete data integrity proof
# (includes proofValue, unlike DataIntegrityProofOptions).

from typing import Optional

from marshmallow import INCLUDE, fields, post_dump

from ....messaging.models.base import BaseModel, BaseModelSchema
from ....messaging.valid import (
    RFC3339_DATETIME_EXAMPLE,
    UUID4_EXAMPLE,
    Uri,
)


class DataIntegrityProof(BaseModel):
    """Data Integrity Proof model."""

    class Meta:
        """DataIntegrityProof metadata."""

        schema_class = "DataIntegrityProofSchema"

    def __init__(
        self,
        id: Optional[str] = None,
        type: Optional[str] = "DataIntegrityProof",
        proof_purpose: Optional[str] = None,
        verification_method: Optional[str] = None,
        cryptosuite: Optional[str] = None,
        created: Optional[str] = None,
        expires: Optional[str] = None,
        domain: Optional[str] = None,
        challenge: Optional[str] = None,
        proof_value: Optional[str] = None,
        previous_proof: Optional[str] = None,
        nonce: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Initialize the DataIntegrityProof instance."""

        self.id = id
        self.type = type
        self.proof_purpose = proof_purpose
        self.verification_method = verification_method
        self.cryptosuite = cryptosuite
        self.created = created
        self.expires = expires
        self.domain = domain
        self.challenge = challenge
        self.proof_value = proof_value
        self.previous_proof = previous_proof
        self.nonce = nonce
        # Unknown keyword args are retained and re-emitted on serialization
        # (see add_unknown_properties below).
        self.extra = kwargs


class DataIntegrityProofSchema(BaseModelSchema):
    """Data Integrity Proof schema.

    Based on https://www.w3.org/TR/vc-data-integrity/#proofs

    """

    class Meta:
        """Accept parameter overload."""

        unknown = INCLUDE
        model_class = DataIntegrityProof

    id = fields.Str(
        required=False,
        metadata={
            "description": (
                "An optional identifier for the proof, which MUST be a URL [URL], \
                such as a UUID as a URN"
            ),
            "example": "urn:uuid:6a1676b8-b51f-11ed-937b-d76685a20ff5",
        },
    )

    type = fields.Str(
        required=True,
        metadata={
            "description": (
                "The specific type of proof MUST be specified as a string that maps \
                to a URL [URL]."
            ),
            "example": "DataIntegrityProof",
        },
    )

    proof_purpose = fields.Str(
        data_key="proofPurpose",
        required=True,
        metadata={
            "description": "The proof purpose acts as a safeguard to prevent the proof \
                from being misused by being applied to a purpose other than the one that \
                was intended.",
            "example": "assertionMethod",
        },
    )

    verification_method = fields.Str(
        data_key="verificationMethod",
        required=True,
        validate=Uri(),
        metadata={
            "description": "A verification method is the means and information needed \
                to verify the proof. ",
            "example": (
                "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL#z6Mkgg34"
                "2Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL"
            ),
        },
    )

    cryptosuite = fields.Str(
        required=True,
        metadata={
            "description": (
                "An identifier for the cryptographic suite that can be used to verify \
                the proof."
            ),
            "example": "eddsa-jcs-2022",
        },
    )

    created = fields.Str(
        required=False,
        metadata={
            "description": (
                "The date and time the proof was created is OPTIONAL and, if included, \
                MUST be specified as an [XMLSCHEMA11-2] dateTimeStamp string"
            ),
            "example": RFC3339_DATETIME_EXAMPLE,
        },
    )

    expires = fields.Str(
        required=False,
        metadata={
            "description": (
                "The expires property is OPTIONAL and, if present, specifies when the \
                proof expires. If present, it MUST be an [XMLSCHEMA11-2] \
                dateTimeStamp string"
            ),
            "example": RFC3339_DATETIME_EXAMPLE,
        },
    )

    domain = fields.Str(
        required=False,
        metadata={
            "description": (
                "It conveys one or more security domains in which the proof is \
                meant to be used."
            ),
            "example": "example.com",
        },
    )

    challenge = fields.Str(
        required=False,
        metadata={
            "description": (
                "The value is used once for a particular domain and window of time. \
                This value is used to mitigate replay attacks."
            ),
            "example": UUID4_EXAMPLE,
        },
    )

    proof_value = fields.Str(
        required=False,
        data_key="proofValue",
        metadata={
            "description": "A string value that expresses base-encoded binary data \
                necessary to verify the digital proof using the verificationMethod \
                specified.",
            "example": (
                "zsy1AahqbzJQ63n9RtekmwzqZeVj494VppdAVJBnMYrTwft6cLJJGeTSSxCCJ6HKnR"
                "twE7jjDh6sB2z2AAiZY9BBnCD8wUVgwqH3qchGRCuC2RugA4eQ9fUrR4Yuycac3caiaaay"
            ),
        },
    )

    previous_proof = fields.Str(
        required=False,
        data_key="previousProof",
        metadata={
            "description": "Each value identifies another data integrity proof that \
                MUST verify before the current proof is processed.",
            "example": ("urn:uuid:6a1676b8-b51f-11ed-937b-d76685a20ff5"),
        },
    )

    nonce = fields.Str(
        required=False,
        metadata={
            "description": "One use of this field is to increase privacy by decreasing \
                linkability that is the result of deterministically generated \
                signatures.",
            "example": (
                "CF69iO3nfvqRsRBNElE8b4wO39SyJHPM7Gg1nExltW5vSfQA1lvDCR/zXX1To0/4NLo=="
            ),
        },
    )

    @post_dump(pass_original=True)
    def add_unknown_properties(self, data: dict, original, **kwargs):
        """Add back unknown properties before outputting."""

        data.update(original.extra)

        return data
"""DataIntegrityProof."""

# Recovered file: aries_cloudagent/vc/data_integrity/models/verification_response.py
# Result objects returned by proof verification: a per-proof result
# (DataIntegrityVerificationResult) and the aggregate response
# (DataIntegrityVerificationResponse), plus RFC 9457-style ProblemDetails.

from typing import Optional, List

from marshmallow import INCLUDE, fields

from ....messaging.models.base import BaseModel, BaseModelSchema
from .proof import DataIntegrityProof, DataIntegrityProofSchema


class ProblemDetails(BaseModel):
    """ProblemDetails model."""

    class Meta:
        """ProblemDetails metadata."""

        schema_class = "ProblemDetailsSchema"

    def __init__(
        self,
        type: Optional[str] = None,
        title: Optional[str] = None,
        detail: Optional[str] = None,
    ) -> None:
        """Initialize the ProblemDetails instance."""

        self.type = type
        self.title = title
        self.detail = detail


class ProblemDetailsSchema(BaseModelSchema):
    """ProblemDetails schema.

    Based on https://www.w3.org/TR/vc-data-model-2.0/#problem-details.

    """

    class Meta:
        """Accept parameter overload."""

        unknown = INCLUDE
        model_class = ProblemDetails

    type = fields.Str(
        required=True,
        metadata={
            "example": "https://w3id.org/security#PROOF_VERIFICATION_ERROR",
        },
    )

    title = fields.Str(
        required=False,
        metadata={},
    )

    detail = fields.Str(
        required=False,
        metadata={},
    )


class DataIntegrityVerificationResult(BaseModel):
    """Data Integrity Verification Result model."""

    class Meta:
        """DataIntegrityVerificationResult metadata."""

        schema_class = "DataIntegrityVerificationResultSchema"

    def __init__(
        self,
        verified: Optional[bool] = None,
        proof: Optional[DataIntegrityProof] = None,
        problem_details: Optional[List[ProblemDetails]] = None,
    ) -> None:
        """Initialize the DataIntegrityVerificationResult instance."""

        self.verified = verified
        self.proof = proof
        self.problem_details = problem_details


class DataIntegrityVerificationResultSchema(BaseModelSchema):
    """DataIntegrityVerificationResult schema."""

    class Meta:
        """Accept parameter overload."""

        unknown = INCLUDE
        model_class = DataIntegrityVerificationResult

    verified = fields.Bool(
        required=True,
        metadata={
            "example": False,
        },
    )

    proof = fields.Nested(
        DataIntegrityProofSchema(),
        required=True,
        metadata={},
    )

    problem_details = fields.List(
        fields.Nested(ProblemDetailsSchema()),
        data_key="problemDetails",
        required=True,
        metadata={},
    )


class DataIntegrityVerificationResponse(BaseModel):
    """Data Integrity Verification Response model."""

    class Meta:
        """DataIntegrityVerificationResponse metadata."""

        schema_class = "DataIntegrityVerificationResponseSchema"

    def __init__(
        self,
        verified: Optional[bool] = None,
        verified_document: Optional[dict] = None,
        results: Optional[List[DataIntegrityVerificationResult]] = None,
    ) -> None:
        """Initialize the DataIntegrityVerificationResponse instance."""

        self.verified = verified
        self.verified_document = verified_document
        self.results = results


class DataIntegrityVerificationResponseSchema(BaseModelSchema):
    """DataIntegrityVerificationResponse schema."""

    class Meta:
        """Accept parameter overload."""

        unknown = INCLUDE
        model_class = DataIntegrityVerificationResponse

    verified = fields.Bool(
        required=True,
        metadata={
            "example": False,
        },
    )

    verified_document = fields.Dict(
        data_key="verifiedDocument",
        required=False,
        metadata={},
    )

    results = fields.List(
        fields.Nested(DataIntegrityVerificationResultSchema()),
        required=False,
        metadata={},
    )
class AddProofResponseSchema(OpenAPISchema):
    """Response schema to adding a DI proof to a document."""

    secured_document = fields.Dict(
        required=True, metadata={"example": {"hello": "world"}}
    )


class VerifyDiRequestSchema(OpenAPISchema):
    """Request schema to verify a document secured with a data integrity proof."""

    secured_document = fields.Dict(
        data_key="securedDocument",
        required=True,
        metadata={
            "example": {
                "hello": "world",
                "proof": [
                    {
                        "cryptosuite": "eddsa-jcs-2022",
                        "proofPurpose": "assertionMethod",
                        "type": "DataIntegrityProof",
                        # Implicit string concatenation keeps these long
                        # identifiers readable. The previous backslash
                        # continuations were *inside* the string literals,
                        # which embedded the next line's indentation into
                        # the documented example values.
                        "verificationMethod": (
                            "did:key:"
                            "z6MksxraKwH8GR7NKeQ4HVZAeRKvD76kfd6G7jm8MscbDmy8"
                            "#z6MksxraKwH8GR7NKeQ4HVZAeRKvD76kfd6G7jm8MscbDmy8"
                        ),
                        "proofValue": (
                            "zHtda8vV7kJQUPfSKiTGSQDhZfhkgtpnVziT7cdEzhu"
                            "fjPjbeRmysHvizMJEox1eHR7xUGzNUj1V4yaKiLw7UA6E"
                        ),
                    }
                ],
            }
        },
    )


class VerifyDiResponseSchema(OpenAPISchema):
    """Response schema to verifying a document secured with a data integrity proof."""

    verified = fields.Bool(metadata={"description": "Verified", "example": True})


@docs(tags=["vc"], summary="Add a DataIntegrityProof to a document.")
@request_schema(AddProofSchema())
@response_schema(AddProofResponseSchema(), description="")
@tenant_authentication
async def add_di_proof(request: web.BaseRequest):
    """Request handler for creating di proofs.

    Args:
        request: aiohttp request object

    Returns:
        201 with the secured document, or 400 on wallet/manager errors.

    """
    context: AdminRequestContext = request["context"]
    body = await request.json()

    document = body.get("document")
    options = body.get("options")

    try:
        options = DataIntegrityProofOptions.deserialize(options)
        async with context.session() as session:
            secured_document = await DataIntegrityManager(session).add_proof(
                document, options
            )

        return web.json_response({"securedDocument": secured_document}, status=201)

    except (WalletError, DataIntegrityManagerError) as err:
        raise web.HTTPBadRequest(reason=err.roll_up) from err


@docs(tags=["vc"], summary="Verify a document secured with a data integrity proof.")
@request_schema(VerifyDiRequestSchema())
@response_schema(VerifyDiResponseSchema(), description="")
@tenant_authentication
async def verify_di_secured_document(request: web.BaseRequest):
    """Request handler for verifying di proofs.

    Args:
        request: aiohttp request object

    Returns:
        200 when every proof verifies, 400 otherwise; the response payload
        shape is identical in both cases.

    """
    context: AdminRequestContext = request["context"]
    body = await request.json()

    secured_document = body.get("securedDocument")

    try:
        async with context.session() as session:
            verification_response = await DataIntegrityManager(session).verify_proof(
                secured_document
            )
        # Single response path: only the HTTP status depends on the outcome.
        status = 200 if verification_response.verified else 400
        return web.json_response(
            {"verificationResults": verification_response.serialize()},
            status=status,
        )

    except (WalletError, DataIntegrityManagerError) as err:
        raise web.HTTPBadRequest(reason=err.roll_up) from err


async def register(app: web.Application):
    """Register routes."""

    app.add_routes(
        [
            web.post("/vc/di/add-proof", add_di_proof),
            web.post("/vc/di/verify", verify_di_secured_document),
        ]
    )
class TestDiManager(IsolatedAsyncioTestCase):
    """Round-trip tests for DataIntegrityManager add_proof/verify_proof."""

    async def asyncSetUp(self):
        """Provision a test profile with a DID resolver and a known key."""
        self.seed = "00000000000000000000000000000000"
        self.multikey = "z6MkgKA7yrw5kYSiDuQFcye4bMaJpcfHFry3Bx45pdWh3s8i"
        self.verification_method = f"did:key:{self.multikey}#{self.multikey}"
        self.cryptosuite = "eddsa-jcs-2022"
        self.unsecured_document = {"hello": "world"}
        self.options = DataIntegrityProofOptions.deserialize(
            {
                "type": "DataIntegrityProof",
                "cryptosuite": self.cryptosuite,
                "proofPurpose": "assertionMethod",
                "verificationMethod": self.verification_method,
            }
        )

        resolver = DIDResolver()
        resolver.register_resolver(KeyDIDResolver())
        resolver.register_resolver(WebDIDResolver())
        self.resolver = resolver
        self.profile = InMemoryProfile.test_profile({}, bind={DIDResolver: resolver})
        try:
            async with self.profile.session() as session:
                await MultikeyManager(session=session).create(seed=self.seed)
        except Exception:
            # Best effort: the key may already exist in the wallet.
            # NOTE(review): presumably a duplicate-key error — consider
            # narrowing this except clause once the exact type is confirmed.
            pass

    def _check_proof(self, proof: dict):
        """Assert that a serialized proof mirrors the configured options."""
        assert proof["type"] == self.options.type
        assert proof["cryptosuite"] == self.options.cryptosuite
        assert proof["proofPurpose"] == self.options.proof_purpose
        assert proof["verificationMethod"] == self.options.verification_method
        assert proof["proofValue"]

    async def test_add_proof(self):
        """A single proof is attached as a one-element list."""
        async with self.profile.session() as session:
            secured = await DataIntegrityManager(session=session).add_proof(
                self.unsecured_document, self.options
            )
        proofs = secured.pop("proof", None)
        assert isinstance(proofs, list)
        assert len(proofs) == 1
        self._check_proof(proofs[0])

    async def test_add_proof_set(self):
        """Securing an already-secured document appends to the proof set."""
        async with self.profile.session() as session:
            manager = DataIntegrityManager(session=session)
            once = await manager.add_proof(self.unsecured_document, self.options)
            twice = await DataIntegrityManager(session=session).add_proof(
                once, self.options
            )
        proof_set = twice.pop("proof", None)
        assert isinstance(proof_set, list)
        assert len(proof_set) == 2
        for proof in proof_set:
            self._check_proof(proof)

    async def test_add_proof_chain(self):
        """Placeholder until proof-chain support lands."""
        pass

    async def test_verify_proof(self):
        """Verification passes on a valid proof and fails on a tampered one."""
        async with self.profile.session() as session:
            manager = DataIntegrityManager(session=session)
            secured = await manager.add_proof(self.unsecured_document, self.options)
            assert (await manager.verify_proof(secured)).verified

            # Corrupt the signature by dropping its final character.
            tampered = secured["proof"][0].copy()
            tampered["proofValue"] = tampered["proofValue"][:-1]
            secured["proof"][0] = tampered
            assert not (await manager.verify_proof(secured)).verified

    async def test_verify_proof_set(self):
        """A proof set fails verification if any member is tampered."""
        async with self.profile.session() as session:
            manager = DataIntegrityManager(session=session)
            secured = await manager.add_proof(self.unsecured_document, self.options)
            with_set = await manager.add_proof(secured, self.options)
            assert (await manager.verify_proof(with_set)).verified

            tampered = with_set["proof"][0].copy()
            tampered["proofValue"] = tampered["proofValue"][:-1]
            with_set["proof"][0] = tampered
            assert not (await manager.verify_proof(with_set)).verified

    async def test_verify_proof_chain(self):
        # TODO, add tests once proof chain support is added
        pass
def multikey_to_verkey(multikey: str, alg: str = DEFAULT_ALG):
    """Transform multikey to verkey.

    Decodes the multibase value and strips the algorithm's multicodec
    prefix, returning the raw public key re-encoded as base58.
    """

    prefix_length = ALG_MAPPINGS[alg]["prefix_length"]
    public_bytes = bytes(bytearray(multibase.decode(multikey))[prefix_length:])

    return bytes_to_b58(public_bytes)


def verkey_to_multikey(verkey: str, alg: str = DEFAULT_ALG):
    """Transform verkey to multikey.

    Prepends the algorithm's multicodec prefix to the raw public key bytes
    and multibase-encodes the result as base58btc.
    """

    prefix_hex = ALG_MAPPINGS[alg]["prefix_hex"]
    prefixed_key_hex = f"{prefix_hex}{b58_to_bytes(verkey).hex()}"

    return multibase.encode(bytes.fromhex(prefixed_key_hex), "base58btc")


def key_type_from_multikey(multikey: str):
    """Derive key_type class from multikey prefix.

    Raises:
        MultikeyManagerError: if no configured algorithm matches the prefix.

    """
    for mapping in ALG_MAPPINGS.values():
        if multikey.startswith(mapping["multikey_prefix"]):
            return mapping["key_type"]

    raise MultikeyManagerError(f"Unsupported key algorithm for multikey {multikey}.")


class MultikeyManagerError(Exception):
    """Generic MultikeyManager Error."""


class MultikeyManager:
    """Class for managing wallet keys."""

    def __init__(self, session: ProfileSession):
        """Initialize the MultikeyManager."""
        self.session: ProfileSession = session
        self.wallet: BaseWallet = session.inject(BaseWallet)

    async def resolve_multikey_from_verification_method(self, kid: str):
        """Derive a multikey from the verification method.

        Dereferences the DID URL and converts the verification material to
        multikey form according to the verification method type.

        Raises:
            MultikeyManagerError: on an unrecognized verification method type.

        """
        resolver = self.session.inject(DIDResolver)
        verification_method = await resolver.dereference(
            profile=self.session.profile, did_url=kid
        )

        if verification_method.type == "Multikey":
            multikey = verification_method.public_key_multibase

        elif verification_method.type == "Ed25519VerificationKey2018":
            multikey = verkey_to_multikey(verification_method.public_key_base58)

        elif verification_method.type == "Ed25519VerificationKey2020":
            multikey = verkey_to_multikey(verification_method.public_key_multibase)

        else:
            raise MultikeyManagerError("Unknown verification method type.")

        return multikey

    def key_type_from_multikey(self, multikey: str):
        """Derive key_type class from multikey prefix.

        Delegates to the module-level ``key_type_from_multikey`` so the
        prefix table is consulted in exactly one place; previously this
        method duplicated that function line-for-line.
        """
        return key_type_from_multikey(multikey)
key_info.kid, - "multikey": self._verkey_to_multikey(key_info.verkey), + "multikey": verkey_to_multikey(key_info.verkey), } async def from_multikey(self, multikey: str): """Fetch a single key.""" - key_info = await self.wallet.get_signing_key( - verkey=self._multikey_to_verkey(multikey) - ) + key_info = await self.wallet.get_signing_key(verkey=multikey_to_verkey(multikey)) return { "kid": key_info.kid, - "multikey": self._verkey_to_multikey(key_info.verkey), + "multikey": verkey_to_multikey(key_info.verkey), } async def create(self, seed: str = None, kid: str = None, alg: str = DEFAULT_ALG): @@ -91,7 +136,7 @@ async def create(self, seed: str = None, kid: str = None, alg: str = DEFAULT_ALG return { "kid": key_info.kid, - "multikey": self._verkey_to_multikey(key_info.verkey), + "multikey": verkey_to_multikey(key_info.verkey), } async def update(self, multikey: str, kid: str): @@ -101,10 +146,10 @@ async def update(self, multikey: str, kid: str): raise MultikeyManagerError(f"kid '{kid}' already exists in wallet.") key_info = await self.wallet.assign_kid_to_key( - verkey=self._multikey_to_verkey(multikey), kid=kid + verkey=multikey_to_verkey(multikey), kid=kid ) return { "kid": key_info.kid, - "multikey": self._verkey_to_multikey(key_info.verkey), + "multikey": verkey_to_multikey(key_info.verkey), } diff --git a/aries_cloudagent/wallet/keys/tests/test_key_operations.py b/aries_cloudagent/wallet/keys/tests/test_key_operations.py index f38319a065..3d57b73dc7 100644 --- a/aries_cloudagent/wallet/keys/tests/test_key_operations.py +++ b/aries_cloudagent/wallet/keys/tests/test_key_operations.py @@ -1,7 +1,11 @@ """Test MultikeypManager.""" from unittest import IsolatedAsyncioTestCase -from aries_cloudagent.wallet.keys.manager import MultikeyManager +from aries_cloudagent.wallet.keys.manager import ( + MultikeyManager, + multikey_to_verkey, + verkey_to_multikey, +) from aries_cloudagent.core.in_memory import InMemoryProfile @@ -34,13 +38,6 @@ async def 
test_key_creation(self): assert key_info["multikey"] == self.multikey assert key_info["kid"] == self.kid - async def test_key_representations(self): - async with self.profile.session() as session: - assert ( - MultikeyManager(session=session)._multikey_to_verkey(self.multikey) - == self.verkey - ) - assert ( - MultikeyManager(session=session)._verkey_to_multikey(self.verkey) - == self.multikey - ) + async def test_key_transformations(self): + assert multikey_to_verkey(self.multikey) == self.verkey + assert verkey_to_multikey(self.verkey) == self.multikey diff --git a/poetry.lock b/poetry.lock index 0a8175d32d..b01ffa3ff6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -319,6 +319,17 @@ files = [ {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] +[[package]] +name = "canonicaljson" +version = "2.0.0" +description = "Canonical JSON" +optional = false +python-versions = ">=3.7" +files = [ + {file = "canonicaljson-2.0.0-py3-none-any.whl", hash = "sha256:c38a315de3b5a0532f1ec1f9153cd3d716abfc565a558d00a4835428a34fca5b"}, + {file = "canonicaljson-2.0.0.tar.gz", hash = "sha256:e2fdaef1d7fadc5d9cb59bd3d0d41b064ddda697809ac4325dced721d12f113f"}, +] + [[package]] name = "certifi" version = "2024.7.4" @@ -2898,4 +2909,4 @@ didcommv2 = ["didcomm-messaging"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "6f12628b988feacf357702b97af059abce6d5f5d5f50f57c1bf66841e24fe3c4" +content-hash = "96b2bef284f3cab28b42e3129f60591f5d71ca5b1db3fce99aafc6841f74cbe2" diff --git a/pyproject.toml b/pyproject.toml index 26c307b440..4bc5b8de5a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,6 +60,7 @@ ursa-bbs-signatures = { version = "~1.0.1", optional = true } # didcommv2 didcomm-messaging = {version = "^0.1.1a0", optional = true } +canonicaljson = "^2.0.0" [tool.poetry.group.dev.dependencies] pre-commit = "~3.8.0"