diff --git a/.github/workflows/draft.yml b/.github/workflows/draft.yml index a34bdf4..4377207 100644 --- a/.github/workflows/draft.yml +++ b/.github/workflows/draft.yml @@ -15,18 +15,18 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install --upgrade pip - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + pip install -U pip setuptools wheel + pip install -e .[test] - name: Test with pytest run: | - python3 -m pytest -vv -s + pytest -vv -s - name: Create Release id: create_release uses: actions/create-release@v1 @@ -36,4 +36,4 @@ jobs: tag_name: ${{ github.ref }} release_name: ${{ github.ref }} draft: true - prerelease: false \ No newline at end of file + prerelease: false diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index fddf8b5..5c6b16c 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -2,28 +2,80 @@ name: Pull Request & Push to Master Branch on: push: - branches: [ master ] + branches: + master pull_request: - branches: [ master ] + branches: + master + workflow_dispatch: jobs: - build: + lint: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.6, 3.7, 3.8] + python-version: ['3.6', '3.7', '3.8', '3.9', '3.10'] + fail-fast: false + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + pip install -U pip setuptools wheel + pip install .[test] + pip install pre-commit + - name: Run pre-commit + run: | + pre-commit install + # python<3.7 uses older flake8 version, that reports errors on decorator line + # instead of def line. We cannot satisfy both, just ignore legacy and old. + if [ $(python -V | cut -d. 
-f2) -le 7 ] + then + SKIP="pytest,pyproject-flake8" pre-commit run --all-files -v + else + SKIP="pytest" pre-commit run --all-files -v + fi; + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.6', '3.7', '3.8', '3.9', '3.10'] + fail-fast: false steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install --upgrade pip - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - - name: Test with pytest + pip install -U pip setuptools wheel + pip install .[test] + - name: Run pytest + run: | + pytest -vv -s + + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Set up Python 3.10 + uses: actions/setup-python@v3 + with: + python-version: '3.10' + - name: Install dependencies + run: | + pip install -U pip setuptools wheel + pip install -e .[docs] + - name: Make docs run: | - python3 -m pytest -vv -s + cd docs + make clean html + sphinx-build -b linkcheck source build/linkcheck diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..75e90fc --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,63 @@ +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.2.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-merge-conflict + - id: check-toml + - id: check-yaml +- repo: https://github.com/pre-commit/mirrors-isort + rev: v5.9.3 + hooks: + - id: isort + name: isort + args: ["--profile", "black"] +- repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black +- repo: https://github.com/csachs/pyproject-flake8 + rev: v0.0.1a4 + hooks: + - id: pyproject-flake8 + name: flake8 + additional_dependencies: [ + 'flake8-docstrings', + 'flake8-rst-docstrings', + 'flake8-2020', + 'flake8-alfred', + 'flake8-broken-line', + 'flake8-bugbear', + 'flake8-builtins', + 'flake8-debugger', + 'flake8-noqa', + 'flake8-pep3101', + 'flake8-pie', + 'flake8-pytest', + 'flake8-pytest-style', + 'flake8-simplify', + 'pep8-naming', + ] +- repo: local + hooks: + # We could use mypy-mirror, but it has some unconvenient limitations. + - id: doctest + name: doctest + entry: python -m doctest + language: system + files: "README.md" + - id: mypy + name: mypy + entry: mypy --config=pyproject.toml --cache-dir=/dev/null --no-incremental + language: system + types: [ python ] + exclude: "docs/*" + - id: pytest + name: pytest + entry: pytest -vv -s + language: system + types: [ python ] + pass_filenames: false + exclude: "setup.py" + always_run: true diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000..6f9c40d --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,21 @@ +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +version: 2 +build: + os: ubuntu-20.04 + tools: + python: "3.10" + apt_packages: + - graphviz + +sphinx: + configuration: docs/source/conf.py + +python: + install: + - method: pip + path: . + extra_requirements: + - docs + - test diff --git a/Makefile b/Makefile index fe10a5b..097459a 100644 --- a/Makefile +++ b/Makefile @@ -1,11 +1,13 @@ install: python3 -m venv .env - . .env/bin/activate && pip3 install -r requirements.txt + . .env/bin/activate && python3 -m pip install -e . + pre-commit install test: - . 
.env/bin/activate && python3 -m pytest --cov=thor_devkit --no-cov-on-fail --cov-report=term-missing -vv -s + . .env/bin/activate && python3 -m pytest -vv -s publish: test rm -rf dist/* - . .env/bin/activate && python3 setup.py sdist bdist_wheel - . .env/bin/activate && python3 -m twine upload dist/* + . .env/bin/activate \ + && python3 setup.py sdist bdist_wheel \ + && python3 -m twine upload dist/* diff --git a/README.md b/README.md index 7e57e79..30b51a0 100644 --- a/README.md +++ b/README.md @@ -1,487 +1,539 @@ + +[![PyPi Version](https://img.shields.io/pypi/v/thor_devkit.svg)](https://pypi.python.org/pypi/thor_devkit/) +[![Python Versions](https://img.shields.io/pypi/pyversions/thor_devkit.svg)](https://pypi.python.org/pypi/thor_devkit/) +[![Read the Docs](https://readthedocs.org/projects/thor-devkitpy-alt/badge/?version=latest)](https://thor-devkitpy-alt.readthedocs.io/en/latest/?badge=latest) +[![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit) +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) + # VeChain Thor Devkit (SDK) in Python 3 -Python 3 (Python 3.6+) library to assist smooth development on VeChain for developers and hobbyists. - -| Content | -| --------------------------------------------------------- | -| Public key, private key, address conversion. | -| Mnemonic Wallets. | -| HD Wallet. | -| Keystore. | -| Various Hashing functions. | -| Signing messages. | -| Verify signature of messages. | -| Bloom filter. | -| Transaction Assembling (**Multi-task Transaction, MTT**). | -| Fee Delegation Transaction (**VIP-191**). | -| Self-signed Certificate (**VIP-192**). | -| ABI decoding of "functions" and "events" in logs. | +Python 3 (``Python 3.6+``) library to assist smooth development on VeChain for developers and hobbyists. + +- [VeChain Thor Devkit (SDK) in Python 3](#vechain-thor-devkit--sdk--in-python-3) +- [Install](#install) +- [Tutorials](#tutorials) + + [Validation](#validation) + + [Private/Public Keys](#private-public-keys) + + [Sign & Verify Signature](#sign---verify-signature) + + [Mnemonic Wallet](#mnemonic-wallet) + + [HD Wallet](#hd-wallet) + + [Keystore](#keystore) + + [Hash the Messages](#hash-the-messages) + + [Bloom Filter](#bloom-filter) + + [Transaction](#transaction) + + [Transaction (VIP-191)](#transaction--vip-191-) + + [Sign/Verify Certificate (VIP-192)](#sign-verify-certificate--vip-192-) + + [ABI](#abi) +- [Tweak the Code](#tweak-the-code) + * [Layout](#layout) + * [Local Development](#local-development) + * [Knowledge](#knowledge) + * [Upgrading to version 2.0.0](#upgrading-to-version-200) ... and will always be updated with the **newest** features on VeChain. +Read our [documentation](https://thor-devkitpy-alt.readthedocs.io/en/latest/) on ReadTheDocs. + # Install + ```bash pip3 install thor-devkit -U ``` ***Caveat: Bip32 depends on the ripemd160 hash library, which should be present on your system.*** -# Tutorials +Supported extras: -### Private/Public Keys -```python -from thor_devkit import cry -from thor_devkit.cry import secp256k1 +- `test`: install developer requirements (`pip install thor-devkit[test]`). +- `docs`: install `sphinx`-related packages (`pip install thor-devkit[test,docs]`). 
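If you are unsure whether the `ripemd160` caveat above applies to your system, here is a quick probe you can run (a minimal sketch, independent of this library; it only asks `hashlib` whether the algorithm is available):

```python
import hashlib

try:
    # hashlib.new raises ValueError when the requested algorithm is unavailable.
    hashlib.new("ripemd160", b"probe")
    print("ripemd160 is available")
except ValueError:
    print("ripemd160 is missing; Bip32/HD wallet features will not work")
```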
-private_key = secp256k1.generate_privateKey() +# Tutorials -public_key = secp256k1.derive_publicKey(private_key) +### Validation -_address_bytes = cry.public_key_to_address(public_key) -address = '0x' + _address_bytes.hex() +Many modules and classes have `validate` and `is_valid` methods. They perform exactly the same validation, but the former raises exceptions for malformed inputs (returns `True` for valid), while the latter returns `False` for invalid and `True` for valid inputs. -print( address ) -# 0x86d8cd908e43bc0076bc99e19e1a3c6221436ad0 -print('is address?', cry.is_address(address)) -# is address? True -print( cry.to_checksum_address(address) ) -# 0x86d8CD908e43BC0076Bc99e19E1a3c6221436aD0 +### Private/Public Keys + +```pycon +>>> from thor_devkit import cry +>>> from thor_devkit.cry import secp256k1 +>>> private_key = secp256k1.generate_private_key() +>>> public_key = secp256k1.derive_public_key(private_key) +>>> _address_bytes = cry.public_key_to_address(public_key) +>>> address = '0x' + _address_bytes.hex() +>>> address # doctest:+SKIP +'0x86d8cd908e43bc0076bc99e19e1a3c6221436ad0' +>>> cry.is_address(address) # Is it a valid address? +True +>>> cry.to_checksum_address(address) # doctest:+SKIP +'0x86d8CD908e43BC0076Bc99e19E1a3c6221436aD0' ``` ### Sign & Verify Signature -```python -from thor_devkit import cry -from thor_devkit.cry import secp256k1 +```pycon +>>> from thor_devkit.cry import secp256k1, keccak256 +>>> private_key = bytes.fromhex( +... '7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a' +... ) # bytes +>>> msg_hash, _ = keccak256([b'hello world']) # bytes -# bytes -private_key = bytes.fromhex('7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a') -# bytes -msg_hash, _ = cry.keccak256([b'hello world']) +Sign the message hash: +>>> signature = secp256k1.sign(msg_hash, private_key) # bytes -# Sign the message hash. -# bytes -signature = secp256k1.sign(msg_hash, private_key) +Recover public key from given message hash and signature: +>>> public_key = secp256k1.recover(msg_hash, signature) # bytes -# Recover public key from given message hash and signature. -# bytes -public_key = secp256k1.recover(msg_hash, signature) ``` ### Mnemonic Wallet -```python -from thor_devkit.cry import mnemonic - -words = mnemonic.generate() -print(words) -# ['fashion', 'reduce', 'resource', 'ordinary', 'seek', 'kite', 'space', 'marriage', 'cube', 'detail', 'bundle', 'latin'] +```pycon +>>> from thor_devkit.cry import mnemonic +>>> words = mnemonic.generate() +>>> words # doctest:+SKIP +['fashion', 'reduce', 'resource', 'ordinary', 'seek', 'kite', 'space', 'marriage', 'cube', 'detail', 'bundle', 'latin'] +>>> assert mnemonic.is_valid(words) -flag = mnemonic.validate(words) -print(flag) -# True +Quickly get a Bip32 master seed for HD wallets. See below "HD Wallet". +>>> seed = mnemonic.derive_seed(words) -# Quickly get a Bip32 master seed for HD wallets. See below "HD Wallet". -seed = mnemonic.derive_seed(words) +Quickly get a private key: +>>> private_key = mnemonic.derive_private_key(words, 0) -# Quickly get a private key. -private_key = mnemonic.derive_private_key(words, 0) ``` ### HD Wallet -Hierarchical Deterministic Wallets. See [bip-32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) and [bip-44](https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki). - -```python -from thor_devkit import cry -from thor_devkit.cry import hdnode - -# Construct an HD node from words. 
(Recommended) -words = 'ignore empty bird silly journey junior ripple have guard waste between tenant'.split(' ') - -hd_node = cry.HDNode.from_mnemonic( - words, - init_path=hdnode.VET_EXTERNAL_PATH -) # VET wallet, you can input other string values to generate BTC/ETH/... wallets. - -# Or, construct HD node from seed. (Advanced) -seed = '28bc19620b4fbb1f8892b9607f6e406fcd8226a0d6dc167ff677d122a1a64ef936101a644e6b447fd495677f68215d8522c893100d9010668614a68b3c7bb49f' - -hd_node = cry.HDNode.from_seed( - bytes.fromhex(seed), - init_path=hdnode.VET_EXTERNAL_PATH -) # VET wallet, you can input other string values to generate BTC/ETH/... wallets. - -# Access the HD node's properties. -priv = hd_node.private_key() -pub = hd_node.public_key() -addr = hd_node.address() -cc = hd_node.chain_code() - -# Or, construct HD node from a given public key. (Advanced) -# Notice: This HD node cannot derive child HD node with "private key". -hd_node = cry.HDNode.from_public_key(pub, cc) - -# Or, construct HD node from a given private key. (Advanced) -hd_node = cry.HDNode.from_private_key(priv, cc) - -# Let it derive further child HD nodes. -for i in range(0, 3): - print('addr:', '0x'+hd_node.derive(i).address().hex()) - print('priv:', hd_node.derive(i).private_key().hex()) - -# addr: 0x339fb3c438606519e2c75bbf531fb43a0f449a70 -# priv: 27196338e7d0b5e7bf1be1c0327c53a244a18ef0b102976980e341500f492425 -# addr: 0x5677099d06bc72f9da1113afa5e022feec424c8e -# priv: 0xcf44074ec3bf912d2a46b7c84fa6eb745652c9c74e674c3760dc7af07fc98b62 -# addr: 0x86231b5cdcbfe751b9ddcd4bd981fc0a48afe921 -# priv: 2ca054a50b53299ea3949f5362ee1d1cfe6252fbe30bea3651774790983e9348 -``` -### Keystore +Hierarchical Deterministic Wallets. -```python -from thor_devkit.cry import keystore - -ks = { - "version": 3, - "id": "f437ebb1-5b0d-4780-ae9e-8640178ffd77", - "address": "dc6fa3ec1f3fde763f4d59230ed303f854968d26", - "crypto": - { - "kdf": "scrypt", - "kdfparams": { - "dklen": 32, - "salt": "b57682e5468934be81217ad5b14ca74dab2b42c2476864592c9f3b370c09460a", - "n": 262144, - "r": 8, - "p": 1 - }, - "cipher": "aes-128-ctr", - "ciphertext": "88cb876f9c0355a89cad88ee7a17a2179700bc4306eaf78fa67320efbb4c7e31", - "cipherparams": { - "iv": "de5c0c09c882b3f679876b22b6c5af21" - }, - "mac": "8426e8a1e151b28f694849cb31f64cbc9ae3e278d02716cf5b61d7ddd3f6e728" - } -} -password = b'123456' - -# Decrypt -private_key = keystore.decrypt(ks, password) - -# Encrypt -ks_backup = keystore.encrypt(private_key, password) -``` +See [BIP-32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) and [BIP-44](https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki). -### Hash the Messages -```python -from thor_devkit import cry +```pycon +>>> from thor_devkit.cry import hdnode, HDNode -result, length = cry.blake2b256([b'hello world']) -result2, length = cry.blake2b256([b'hello', b' world']) -# result == result2 +Construct an HD node from words (recommended): +>>> words = 'ignore empty bird silly journey junior ripple have guard waste between tenant'.split() -result, length = cry.keccak256([b'hello world']) -result2, length = cry.keccak256([b'hello', b' world']) -# result == result2 -``` +>>> hd_node = HDNode.from_mnemonic( +... words, +... init_path=hdnode.VET_EXTERNAL_PATH, +... ) # VET wallet, you can input other string values to generate BTC/ETH/... wallets. 
+Or, construct HD node from seed (advanced): +>>> seed = '28bc19620b4fbb1f8892b9607f6e406fcd8226a0d6dc167ff677d122a1a64ef936101a644e6b447fd495677f68215d8522c893100d9010668614a68b3c7bb49f' +>>> hd_node = HDNode.from_seed( +... bytes.fromhex(seed), +... init_path=hdnode.VET_EXTERNAL_PATH, +... ) # VET wallet, you can input other string values to generate BTC/ETH/... wallets. -### Bloom Filter -```python -from thor_devkit import Bloom +Access the HD node's properties: +>>> priv = hd_node.private_key +>>> pub = hd_node.public_key +>>> addr = hd_node.address +>>> cc = hd_node.chain_code -# Create a bloom filter that can store 100 items. -_k = Bloom.estimate_k(100) -b = Bloom(_k) +Or, construct HD node from a given public key (advanced) +Notice: This HD node cannot derive child HD node with "private key". +>>> hd_node = HDNode.from_public_key(pub, cc) -# Add an item to the bloom filter. -b.add(bytes('hello world', 'UTF-8')) +Or, construct HD node from a given private key (advanced): +>>> hd_node = HDNode.from_private_key(priv, cc) + +Let it derive further child HD nodes: +>>> for i in range(3): +... print('addr:', '0x' + hd_node.derive(i).address.hex()) +... print('priv:', hd_node.derive(i).private_key.hex()) +addr: 0x339fb3c438606519e2c75bbf531fb43a0f449a70 +priv: 27196338e7d0b5e7bf1be1c0327c53a244a18ef0b102976980e341500f492425 +addr: 0x5677099d06bc72f9da1113afa5e022feec424c8e +priv: cf44074ec3bf912d2a46b7c84fa6eb745652c9c74e674c3760dc7af07fc98b62 +addr: 0x86231b5cdcbfe751b9ddcd4bd981fc0a48afe921 +priv: 2ca054a50b53299ea3949f5362ee1d1cfe6252fbe30bea3651774790983e9348 -# Verify -b.test(bytes('hello world', 'UTF-8')) -# True -b.test(bytes('bye bye blue bird', 'UTF-8')) -# False ``` -### Transaction +### Keystore + +```pycon +>>> from thor_devkit.cry import keystore +>>> ks = { +... "version": 3, +... "id": "f437ebb1-5b0d-4780-ae9e-8640178ffd77", +... "address": "dc6fa3ec1f3fde763f4d59230ed303f854968d26", +... "crypto": +... { +... "kdf": "scrypt", +... "kdfparams": { +... "dklen": 32, +... "salt": "b57682e5468934be81217ad5b14ca74dab2b42c2476864592c9f3b370c09460a", +... "n": 262144, +... "r": 8, +... "p": 1 +... }, +... "cipher": "aes-128-ctr", +... "ciphertext": "88cb876f9c0355a89cad88ee7a17a2179700bc4306eaf78fa67320efbb4c7e31", +... "cipherparams": { +... "iv": "de5c0c09c882b3f679876b22b6c5af21" +... }, +... "mac": "8426e8a1e151b28f694849cb31f64cbc9ae3e278d02716cf5b61d7ddd3f6e728" +... } +... } +>>> password = b'123456' + +Decrypt: +>>> private_key = keystore.decrypt(ks, password) + +Encrypt: +>>> ks_backup = keystore.encrypt(private_key, password) -```python -from thor_devkit import cry, transaction - -# See: https://docs.vechain.org/thor/learn/transaction-model.html#model -body = { - "chainTag": int('0x4a', 16), # 0x4a/0x27/0xa4 See: https://docs.vechain.org/others/miscellaneous.html#network-identifier - "blockRef": '0x00000000aabbccdd', - "expiration": 32, - "clauses": [ - { - "to": '0x7567d83b7b8d80addcb281a71d54fc7b3364ffed', - "value": 10000, - "data": '0x000000606060' - }, - { - "to": '0x7567d83b7b8d80addcb281a71d54fc7b3364ffed', - "value": 20000, - "data": '0x000000606060' - } - ], - "gasPriceCoef": 128, - "gas": 21000, - "dependsOn": None, - "nonce": 12345678 -} - -# Construct an unsigned transaction. -tx = transaction.Transaction(body) - -# Access its properties. -tx.get_signing_hash() == cry.blake2b256([tx.encode()])[0] # True - -tx.get_signature() == None # True - -tx.get_origin() == None # True - -tx.get_intrinsic_gas() == 37432 # estimate the gas this tx gonna cost. 
- -# Sign the transaction with a private key. -priv_key = bytes.fromhex('7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a') -message_hash = tx.get_signing_hash() -signature = cry.secp256k1.sign(message_hash, priv_key) - -# Set the signature on the transaction. -tx.set_signature(signature) - -# Tx origin? -print(tx.get_origin()) -# 0xd989829d88b0ed1b06edf5c50174ecfa64f14a64 - -# Tx id? -print(tx.get_id()) -# 0xda90eaea52980bc4bb8d40cb2ff84d78433b3b4a6e7d50b75736c5e3e77b71ec - -# Tx encoded into bytes, ready to be sent out. -encoded_bytes = tx.encode() - -# pretty print the encoded bytes. -print('0x' + encoded_bytes.hex()) - -# http POST transaction to send the encoded_bytes to VeChain... -# See the REST API details: -# testnet: https://sync-testnet.vechain.org/doc/swagger-ui/ -# mainnet: https://sync-mainnet.vechain.org/doc/swagger-ui/ ``` -### Transaction (VIP-191) -[https://github.com/vechain/VIPs/blob/master/vips/VIP-191.md](https://github.com/vechain/VIPs/blob/master/vips/VIP-191.md) +### Hash the Messages -```python -from thor_devkit import cry, transaction +```pycon +>>> from thor_devkit.cry import blake2b256, keccak256 -delegated_body = { - "chainTag": 1, - "blockRef": '0x00000000aabbccdd', - "expiration": 32, - "clauses": [ - { - "to": '0x7567d83b7b8d80addcb281a71d54fc7b3364ffed', - "value": 10000, - "data": '0x000000606060' - }, - { - "to": '0x7567d83b7b8d80addcb281a71d54fc7b3364ffed', - "value": 20000, - "data": '0x000000606060' - } - ], - "gasPriceCoef": 128, - "gas": 21000, - "dependsOn": None, - "nonce": 12345678, - "reserved": { - "features": 1 - } -} +>>> result, length = blake2b256([b'hello world']) +>>> result2, length = blake2b256([b'hello', b' world']) +>>> assert result == result2 +>>> result.hex() +'256c83b297114d201b30179f3f0ef0cace9783622da5974326b436178aeef610' +>>> result, length = keccak256([b'hello world']) +>>> result2, length = keccak256([b'hello', b' world']) +>>> assert result == result2 +>>> result.hex() +'47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad' -delegated_tx = transaction.Transaction(delegated_body) +``` -# Indicate it is a delegated Transaction using VIP-191. -assert delegated_tx.is_delegated() == True -# Sender -addr_1 = '0xf9ea4ba688d55cc7f0eae0dd62f8271b744637bf' +### Bloom Filter -priv_1 = bytes.fromhex('58e444d4fe08b0f4d9d86ec42f26cf15072af3ddc29a78e33b0ceaaa292bcf6b') +```pycon +>>> from thor_devkit import Bloom +Create a bloom filter that can store 100 items: +>>> _k = Bloom.estimate_k(100) +>>> _k +14 +>>> b = Bloom(_k) -# Gas Payer -addr_2 = '0x34b7538c2a7c213dd34c3ecc0098097d03a94dcb' +Add an item to the bloom filter: +>>> b.add(b'hello world') +True -priv_2 = bytes.fromhex('0bfd6a863f347f4ef2cf2d09c3db7b343d84bb3e6fc8c201afee62de6381dc65') +Verify: +>>> assert b'hello world' in b +>>> assert b'bye bye blue bird' not in b +``` -h = delegated_tx.get_signing_hash() # Sender hash to be signed. -dh = delegated_tx.get_signing_hash(addr_1) # Gas Payer hash to be signed. +### Transaction -# Sender sign the hash. -# Gas payer sign the hash. -# Concat two parts to forge a legal signature. -sig = cry.secp256k1.sign(h, priv_1) + cry.secp256k1.sign(dh, priv_2) +[Docs](https://docs.vechain.org/thor/learn/transaction-model.html#model) + +[`chainTag` explained](https://docs.vechain.org/others/miscellaneous.html#network-identifier) + +See the VeChain net REST API details (e.g. 
post transaction): +[testnet](https://sync-testnet.vechain.org/doc/swagger-ui/), +[mainnet](https://sync-mainnet.vechain.org/doc/swagger-ui/) + +```pycon +>>> from thor_devkit import cry +>>> from thor_devkit.transaction import Transaction +>>> body = { +... "chainTag": int('0x4a', 16), # 0x4a/0x27/0xa4 +... "blockRef": '0x00000000aabbccdd', +... "expiration": 32, +... "clauses": [ +... { +... "to": '0x7567d83b7b8d80addcb281a71d54fc7b3364ffed', +... "value": 10000, +... "data": '0x000000606060' +... }, +... { +... "to": '0x7567d83b7b8d80addcb281a71d54fc7b3364ffed', +... "value": 20000, +... "data": '0x000000606060' +... } +... ], +... "gasPriceCoef": 128, +... "gas": 21000, +... "dependsOn": None, +... "nonce": 12345678 +... } + +Construct an unsigned transaction: +>>> tx = Transaction(body) + +Access its properties: +>>> assert tx.get_signing_hash() == cry.blake2b256([tx.encode()])[0] +>>> assert tx.signature is None +>>> assert tx.origin is None +>>> assert tx.intrinsic_gas == 37432 # estimate the gas this tx gonna cost. + +Sign the transaction with a private key: +>>> priv_key = bytes.fromhex( +... '7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a' +... ) +>>> message_hash = tx.get_signing_hash() +>>> signature = cry.secp256k1.sign(message_hash, priv_key) +>>> tx.signature = signature + +>>> tx.origin +'0xd989829d88b0ed1b06edf5c50174ecfa64f14a64' +>>> tx.id +'0xf2c89da3d85952e99961d409abb0b2afb7fa266acc5ed23fb5d23a5d3db395d7' + +Tx encoded into bytes, ready to be sent out: +>>> "0x" + tx.encode().hex() +'0xf8974a84aabbccdd20f840df947567d83b7b8d80addcb281a71d54fc7b3364ffed82271086000000606060df947567d83b7b8d80addcb281a71d54fc7b3364ffed824e208600000060606081808252088083bc614ec0b8419d500064647f37254e22b3ffac04bb5ccff5d91b6d6103a53baeedac17708b8817c6137e1efe3472f3b6fd8af258c2c3945b742c58ba49de2796c8bb54a0bb0601' -delegated_tx.set_signature(sig) +``` + +### Transaction (VIP-191) + +See [VIP-191](https://github.com/vechain/VIPs/blob/master/vips/VIP-191.md) for reference. + +```pycon +>>> from thor_devkit.cry import secp256k1 +>>> from thor_devkit.transaction import Transaction +>>> delegated_body = { +... "chainTag": 1, +... "blockRef": '0x00000000aabbccdd', +... "expiration": 32, +... "clauses": [ +... { +... "to": '0x7567d83b7b8d80addcb281a71d54fc7b3364ffed', +... "value": 10000, +... "data": '0x000000606060' +... }, +... { +... "to": '0x7567d83b7b8d80addcb281a71d54fc7b3364ffed', +... "value": 20000, +... "data": '0x000000606060' +... } +... ], +... "gasPriceCoef": 128, +... "gas": 21000, +... "dependsOn": None, +... "nonce": 12345678, +... "reserved": { +... "features": 1 +... } +... } +>>> delegated_tx = Transaction(delegated_body) + +Indicate it is a delegated Transaction using VIP-191. +>>> assert delegated_tx.is_delegated + +Sender: +>>> addr_1 = '0xf9ea4ba688d55cc7f0eae0dd62f8271b744637bf' +>>> priv_1 = bytes.fromhex('58e444d4fe08b0f4d9d86ec42f26cf15072af3ddc29a78e33b0ceaaa292bcf6b') + +Gas Payer: +>>> addr_2 = '0x34b7538c2a7c213dd34c3ecc0098097d03a94dcb' +>>> priv_2 = bytes.fromhex('0bfd6a863f347f4ef2cf2d09c3db7b343d84bb3e6fc8c201afee62de6381dc65') + +>>> h = delegated_tx.get_signing_hash() # Sender hash to be signed. +>>> dh = delegated_tx.get_signing_hash(addr_1) # Gas Payer hash to be signed. + +Sender signs the hash. +Gas payer signs the hash. 
+Concatenate two parts to forge a legal signature: +>>> sig = secp256k1.sign(h, priv_1) + secp256k1.sign(dh, priv_2) +>>> delegated_tx.signature = sig + +>>> assert delegated_tx.origin == addr_1 +>>> assert delegated_tx.delegator == addr_2 -assert delegated_tx.get_origin() == addr_1 -assert delegated_tx.get_delegator() == addr_2 ``` ### Sign/Verify Certificate (VIP-192) + [https://github.com/vechain/VIPs/blob/master/vips/VIP-192.md](https://github.com/vechain/VIPs/blob/master/vips/VIP-192.md) -```python -from thor_devkit import cry -from thor_devkit.cry import secp256k1 -from thor_devkit import certificate - -# My address. -address = '0xd989829d88b0ed1b06edf5c50174ecfa64f14a64' -# My corresponding private key. -private_key = bytes.fromhex('7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a') - -# My cert. -cert_dict = { - 'purpose': 'identification', - 'payload': { - 'type': 'text', - 'content': 'fyi' - }, - 'domain': 'localhost', - 'timestamp': 1545035330, - 'signer': address -} - -# Construct a cert, without signature. -cert = certificate.Certificate(**cert_dict) - -# Sign the cert with my private key. -sig_bytes = secp256k1.sign( - cry.blake2b256([ - certificate.encode(cert).encode('utf-8') - ])[0], - private_key -) -signature = '0x' + sig_bytes.hex() - -# Mount the signature onto the cert. -cert_dict['signature'] = signature - -# Construct a cert, with signature. -cert2 = certificate.Certificate(**cert_dict) - -# Verify, if verify failed it will throw Exceptions. -certificate.verify(cert2) +```pycon +>>> from thor_devkit.cry import secp256k1 +>>> from thor_devkit.certificate import Certificate + +My private key and address: +>>> address = '0xd989829d88b0ed1b06edf5c50174ecfa64f14a64' +>>> private_key = bytes.fromhex( +... '7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a' +... ) + +My certificate data: +>>> cert_dict = { +... 'purpose': 'identification', +... 'payload': { +... 'type': 'text', +... 'content': 'fyi', +... }, +... 'domain': 'localhost', +... 'timestamp': 1545035330, +... 'signer': address, +... } + +Construct a certificate without signature: +>>> cert = Certificate(**cert_dict) + +Sign the certificate with my private key: +>>> sig_bytes = secp256k1.sign( +... cry.blake2b256([ +... cert.encode().encode() # encode to string, then string to bytes. +... ])[0], +... private_key +... ) +>>> signature = '0x' + sig_bytes.hex() + +Construct a certificate with signature: +>>> cert_dict['signature'] = signature +>>> cert2 = Certificate(**cert_dict) + +Verify, if verify failed it will throw Exceptions. +>>> cert2.verify() +True + +Or get boolean validness: +>>> assert cert2.is_valid() + ``` ### ABI Encode function name and parameters according to ABI. -```python -from thor_devkit import abi - -abi_dict = { - "constant": False, - "inputs": [ - { - "name": "a1", - "type": "uint256" - }, - { - "name": "a2", - "type": "string" - } - ], - "name": "f1", - "outputs": [ - { - "name": "r1", - "type": "address" - }, - { - "name": "r2", - "type": "bytes" - } - ], - "payable": False, - "stateMutability": "nonpayable", - "type": "function" -} - -# Verify if abi_dict is in good shape. -f1 = abi.FUNCTION(abi_dict) - -# Get a function instance of the abi. -f = abi.Function(f1) - -# Get function selector: -selector = f.selector.hex() -selector == '27fcbb2f' - -# Encode the function input parameters. 
-r = f.encode([1, 'foo'], to_hex=True) -r == '0x27fcbb2f000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000003666f6f0000000000000000000000000000000000000000000000000000000000' - -# Decode function return result according to abi. -data = '000000000000000000000000abc000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000003666f6f0000000000000000000000000000000000000000000000000000000000' - -r = f.decode(bytes.fromhex(data)) -# { -# "0": '0xabc0000000000000000000000000000000000001', -# "1": b'666f6f', -# "r1": '0xabc0000000000000000000000000000000000001', -# "r2": b'666f6f' -# } +```pycon +>>> from pprint import pprint +>>> from thor_devkit.abi import Function +>>> abi_dict = { +... "inputs": [ +... { +... "name": "a1", +... "type": "uint256" +... }, +... { +... "name": "a2", +... "type": "string" +... } +... ], +... "name": "f1", +... "outputs": [ +... { +... "name": "r1", +... "type": "address" +... }, +... { +... "name": "r2", +... "type": "bytes" +... } +... ], +... "stateMutability": "nonpayable", +... "type": "function" +... } + +Create a function instance of the ABI: +>>> f = Function(abi_dict) + +Get function selector: +>>> f.selector.hex() +'27fcbb2f' + +Encode the function input parameters: +>>> f.encode([1, 'foo'], to_hex=True) +'0x27fcbb2f000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000003666f6f0000000000000000000000000000000000000000000000000000000000' + +Decode function return result according to ABI: +>>> data = '000000000000000000000000abc000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000003666f6f0000000000000000000000000000000000000000000000000000000000' + +>>> result = f.decode(bytes.fromhex(data)) +>>> result.to_dict() # Use dictionary form +{'r1': '0xabc0000000000000000000000000000000000001', 'r2': b'foo'} +>>> assert result[0] == '0xabc0000000000000000000000000000000000001' # Access by index +>>> assert result.r2 == b'foo' # Or by name + +Create function from solidity code: +>>> contract = ''' +... contract A { +... function f(uint x) public returns(bool) {} +... } +... ''' +>>> func = Function.from_solidity(text=contract) +>>> pprint(func._definition) +{'inputs': [{'internalType': 'uint256', 'name': 'x', 'type': 'uint256'}], + 'name': 'f', + 'outputs': [{'internalType': 'bool', 'name': '', 'type': 'bool'}], + 'stateMutability': 'nonpayable', + 'type': 'function'} + ``` Decode logs according to data and topics. -```python -from thor_devkit import abi - -e2 = abi.EVENT({ - "anonymous": True, - "inputs": [ - { - "indexed": True, - "name": "a1", - "type": "uint256" - }, - { - "indexed": False, - "name": "a2", - "type": "string" - } - ], - "name": "E2", - "type": "event" -}) - -ee = abi.Event(e2) - -# data in hex format. 
-r = ee.decode( - data=bytes.fromhex('00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000003666f6f0000000000000000000000000000000000000000000000000000000000'), - topics=[ - bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000001') - ] -) - -# r == { "0": 1, "1": "foo", "a1": 1, "a2": "foo" } +```pycon +>>> from thor_devkit.abi import Event +>>> data = { +... "anonymous": True, +... "inputs": [ +... { +... "indexed": True, +... "name": "a1", +... "type": "uint256" +... }, +... { +... "indexed": False, +... "name": "a2", +... "type": "string" +... } +... ], +... "name": "E2", +... "type": "event" +... } +>>> event = Event(data) + +Decode data in hex format: +>>> result = event.decode( +... data=bytes.fromhex('00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000003666f6f0000000000000000000000000000000000000000000000000000000000'), +... topics=[ +... bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000001') +... ] +... ) +>>> result.to_dict() +{'a1': 1, 'a2': 'foo'} +>>> result[0] +1 +>>> result.a2 +'foo' + +Create event from solidity code: +>>> contract = ''' +... contract A { +... event E(uint indexed a1, string a2) anonymous; +... } +... ''' +>>> func = Event.from_solidity(text=contract) +>>> pprint(func._definition) +{'anonymous': True, + 'inputs': [{'indexed': True, + 'internalType': 'uint256', + 'name': 'a1', + 'type': 'uint256'}, + {'indexed': False, + 'internalType': 'string', + 'name': 'a2', + 'type': 'string'}], + 'name': 'E', + 'type': 'event'} + ``` # Tweak the Code ## Layout + ``` . ├── LICENSE @@ -502,11 +554,28 @@ r = ee.decode( │ ├── mnemonic.py │ ├── secp256k1.py │ └── utils.py + ├── exceptions.py ├── rlp.py - └── transaction.py + ├── transaction.py + └── validation.py ``` ## Local Development + +You can setup local version with + +```bash +# Create new environment (you can use other name or reuse existing one) +python -m venv .env +. .env/bin/activate +# Editable install +pip install -e .[test] +# Install git hooks +pre-commit install +``` + +Or with help of `Makefile`: + ```bash # install dependencies make install @@ -514,6 +583,18 @@ make install make test ``` +All project tests are based on `pytest`. You can use `tox` (configuration resides in `pyproject.toml`) to test against multiple `python` versions (it will also happen in CI, when you submit a PR). + +You can run `pre-commit` hooks without commiting with + +```bash +pre-commit run --all-files +``` + +We enforce strict coding style: `black` is a part of `pre-commit` setup, also it +includes `flake8` for additional validation. + + ## Knowledge | Name | Bytes | Description | @@ -526,3 +607,18 @@ make test | message hash | 32 | hash of a message | | signature | 65 | signing result, last bit as recovery parameter | | seed | 64 | used to derive bip32 master key | + + +## Upgrading to version 2.0.0 + +In version `2.0.0` a few backwards incompatible changes were introduced. + +- Transaction methods `get_delegator`, `get_intrinsic_gas`, `get_signature`, `set_signature`, `get_origin` are deprecated in favour of properties. `Transaction.get_body` is replaced with `Transaction.body` property and `Transaction.copy_body()` method. `Transaction.is_delegated` is now a property instead of regular method. 
+- Certificate `__init__` method performs basic validation, so some invalid signatures will be rejected during instantiation and not in `verify` method. Module-level functions `encode` and `verify` are deprecated in favour of `Certificate` methods. +- `Bloom` filter has `__contains__` now (so you can use `element in bloom_filter`). +- ABI module has changed significantly. Function and Event can now be instantiated from solidity code with `from_solidity` method. New methods were introduced for encoding and decoding. `decode` results are now custom `namedtuple`'s instead of strange dictionary format, see docs for reference. `Event.get_signature` and `Function.get_selector` are deprecated in favour of `Event.signature` and `Function.selector` properties. +- RLP module functions `pack` and `unpack` are now deprecated, use `BaseWrapper` or `ScalarKind` `serialize` and `deserialize` methods instead. +- Functions with odd names `derive_publicKey` and `generate_privateKey` are deprecated in favour of `derive_public_key` and `generate_private_key`. +- `mnemonic.validate` is deprecated, use `mnemonic.is_valid` instead. +- `keystore.well_formed` is deprecated, use `keystore.validate` and `keystore.is_valid` instead. +- `HDNode` uses properties instead of methods for simple attributes: `private_key`, `public_key`, `chain_code`, `address`, `fingerprint`. diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..d0c3cbf --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..747ffb7 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. 
+ echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/source/_static/.gitkeep b/docs/source/_static/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/docs/source/_templates/.gitkeep b/docs/source/_templates/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/docs/source/abi.rst b/docs/source/abi.rst new file mode 100644 index 0000000..a9d04bf --- /dev/null +++ b/docs/source/abi.rst @@ -0,0 +1,77 @@ +ABI encoding +============ + +Basic concepts +-------------- + +Function selector +***************** + +Function selector is computed as the first 4 bytes of:: + + sha3(signature) + +where ``signature`` is of the form ``funcName(uint8,bool,string)`` (types of arguments +in parentheses) and must not contain any whitespace. All types are normalized +to standard form (e.g. ``fixed`` is transformed into ``fixed128x18`` before hashing). + +Supported types +*************** + +.. table:: Supported types for argument encoding + :width: 100% + + +---------------------+-----------------------------------------------------------+ + | Type | Description | + +=====================+===========================================================+ + | **Elementary types** | + +---------------------+-----------------------------------------------------------+ + | | ``uint<M>`` | | Unsigned and signed ``M``-bit integer. | + | | ``int<M>`` | | :math:`0 < M \leq 256`, | + | | :math:`M \equiv 0 \pmod 8` | + +---------------------+-----------------------------------------------------------+ + | ``address`` | Synonym for ``uint160`` with special semantic meaning. | + +---------------------+-----------------------------------------------------------+ + | | ``int`` | | Synonyms for ``int256`` and ``uint256`` | + | | ``uint`` | | Normalized to full form when computing selector. | + +---------------------+-----------------------------------------------------------+ + | ``bool`` | Equivalent to ``uint8``, restricted to ``0`` or ``1`` | + +---------------------+-----------------------------------------------------------+ + | | ``fixed<M>x<N>`` | | Signed (unsigned) fixed-point ``M``-bit number | + | | | such that number :math:`x` represents value | + | | :math:`\left\lfloor \frac{x}{10^N} \right\rfloor` | + | | ``ufixed<M>x<N>`` | | :math:`0 < M \leq 256`, :math:`0 < N \leq 80`, | + | | :math:`M \equiv N \equiv 0 \pmod 8` | + +---------------------+-----------------------------------------------------------+ + | | ``fixed`` | | Synonyms for ``fixed128x18`` and ``ufixed128x18`` | + | | ``ufixed`` | | Normalized to full form when computing selector. | + +---------------------+-----------------------------------------------------------+ + |``bytes<M>`` | Sequence of ``M`` bytes. | + +---------------------+-----------------------------------------------------------+ + |``function`` | Synonym of ``bytes24``. | + | | 20 bytes address + 4 bytes signature. | + +---------------------+-----------------------------------------------------------+ + | **Fixed-length types** | + +---------------------+-----------------------------------------------------------+ + | ``<type>[M]`` | | Fixed-size array of type ``<type>``. 
| | | Examples: ``int[10]``, ``uint256[33]`` | + +---------------------+-----------------------------------------------------------+ + | **Dynamic types** | + +---------------------+-----------------------------------------------------------+ + | ``bytes`` | Bytes of arbitrary length. | + +---------------------+-----------------------------------------------------------+ + | ``string`` | String of arbitrary length. | + +---------------------+-----------------------------------------------------------+ + | ``<type>[]`` | Array of ``<type>`` of arbitrary length. | + +---------------------+-----------------------------------------------------------+ + +Further reading +*************** + +`Specification `_ + +API documentation +----------------- + +.. automodule:: thor_devkit.abi + :inherited-members: dict diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 0000000..32414fb --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,77 @@ +"""Configuration file for the Sphinx documentation builder.""" +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import sys +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).parent.resolve())) + +from ext.monkey_patch_sphinx import monkey_patch  # noqa: E402 # We need new PATH here. + +monkey_patch() + + +# -- Project information ----------------------------------------------------- + +project = "thor-devkit.py" +copyright = "2022, laalaguer"  # noqa: A001 +author = "laalaguer" + +release = "2.0.0" + + +# -- General configuration --------------------------------------------------- + +extensions = [ + # Built-in plugins + "sphinx.ext.napoleon",  # Numpy-style docstring preprocessing + "sphinx.ext.autodoc",  # Docstring embedding into final documents + "sphinx.ext.intersphinx",  # References to stl + "sphinx.ext.graphviz",  # Nice visual diagrams representation + "sphinx.ext.viewcode",  # Links to source + # Third-party + "autodocsumm",  # Table of module/class elements + # Custom + "ext.toc_plugin",  # Add items to left floating table of contents + "ext.types_group",  # Separate group for type definitions and validation schemas + "ext.monkey_patch_sphinx",  # Fix up ``dict`` as ``TypedDict`` base after all.
+] + +templates_path = ["_templates"] +exclude_patterns = [] + + +# -- Options for HTML output ------------------------------------------------- + +html_theme = "sphinx_rtd_theme" +html_static_path = ["_static"] + +# -- Autodoc config ---------------------------------------------------------- + +autoclass_content = "both" +autodoc_default_options = { + "members": True, + "undoc-members": True, + "show-inheritance": True, + "autosummary": True, + "autosummary-members": True, + "autosummary-undoc-members": True, + "autosummary-nosignatures": True, + "member-order": "bysource", +} + +intersphinx_mapping = { + "python": ("https://docs.python.org/3/", None), + "voluptuous": ("http://alecthomas.github.io/voluptuous/docs/_build/html/", None), + "bip_utils": ("https://bip-utils.readthedocs.io/en/latest/", None), + "solcx": ("https://solcx.readthedocs.io/en/latest/", None), +} diff --git a/docs/source/ext/__init__.py b/docs/source/ext/__init__.py new file mode 100644 index 0000000..44f0d8b --- /dev/null +++ b/docs/source/ext/__init__.py @@ -0,0 +1 @@ +"""Custom sphinx plugins.""" diff --git a/docs/source/ext/monkey_patch_sphinx.py b/docs/source/ext/monkey_patch_sphinx.py new file mode 100644 index 0000000..0660cd1 --- /dev/null +++ b/docs/source/ext/monkey_patch_sphinx.py @@ -0,0 +1,86 @@ +"""Monkey patch :mod:`sphinx` to play well with specific inheritance. + +We set __doc_mro__ attribute for classes that should be altered. + +Then :func:`sphinx.util.inspect.getmro` is patched to honor this attribute. + +Finally, :mod:`sphinx.ext.autosummary` does not read inherited variable members, +so we patch it too to use our brand-new ``getmro``. +""" +from typing import TypedDict, is_typeddict + +from sphinx.util import inspect + +old_getmro = inspect.getmro + + +def new_getmro(obj): + """Try to extract ``__doc_mro__`` attribute, fallback to default behavior.""" + doc_mro = getattr(obj, "__doc_mro__", None) + if isinstance(doc_mro, tuple): + return doc_mro + + return old_getmro(obj) + + +def new_import_ivar_by_name( + name, + prefixes=[None], # noqa: B006 # It is not my decision! + grouped_exception=False, +): + """Get instance variables, including parents traversing.""" + from sphinx.ext import autosummary as asum + + # This is original source + try: + name, attr = name.rsplit(".", 1) + real_name, obj, parent, modname = asum.import_by_name( + name, prefixes, grouped_exception + ) + qualname = real_name.replace(modname + ".", "") + analyzer = asum.ModuleAnalyzer.for_module(getattr(obj, "__module__", modname)) + analyzer.analyze() + if ( + (qualname, attr) in analyzer.attr_docs + # check for presence in `annotations` to include dataclass attributes + or (qualname, attr) in analyzer.annotations + ): + return real_name + "." + attr, asum.INSTANCEATTR, obj, modname + except (ImportError, ValueError, asum.PycodeError) as exc: + raise ImportError from exc + except asum.ImportExceptionGroup: + raise # pass through it as is + + # ===================== Added part ============================================== + # Try to resolve instance-level variables by MRO, if they were requested. 
+ for base in new_getmro(obj): + qname = getattr(base, "__qualname__", None) or getattr(base, "__name__", None) + if not qname: + continue + if (qname, attr) in analyzer.attr_docs or (qname, attr) in analyzer.annotations: + mname = getattr(base, "__module__", modname) + return f"{mname}.{qname}.{attr}", asum.INSTANCEATTR, base, modname + # =============================================================================== + + # Fail as before, if no success. + raise ImportError + + +def monkey_patch(): + """Script entry point.""" + inspect.getmro = new_getmro + + from sphinx.ext import autosummary + + autosummary._module.import_ivar_by_name = new_import_ivar_by_name + + +def fix_typeddict_bases(app, name, obj, options, bases): + """Fix ``dict`` display for ``TypedDict``.""" + if is_typeddict(obj): + bases[:] = [TypedDict] + + +def setup(app): + """Set up this extension.""" + app.connect("autodoc-process-bases", fix_typeddict_bases) diff --git a/docs/source/ext/toc_plugin.py b/docs/source/ext/toc_plugin.py new file mode 100644 index 0000000..8a4d250 --- /dev/null +++ b/docs/source/ext/toc_plugin.py @@ -0,0 +1,262 @@ +"""Sphinx plugin to add references to classes and methods to left side navigation. + +Functions :func:`_build_toc_node`, :func:`_find_toc_node` +and :func:`_get_toc_reference` are copied from :mod:`autoapi.toctree`. +:func:`_traverse_parent` is modified. + +Credits: `sphinx-autoapi `__. +""" +import sphinx +import sphinx.util.logging +from docutils import nodes +from sphinx import addnodes +from sphinx.util.docutils import SphinxDirective + +LOGGER = sphinx.util.logging.getLogger(__name__) + + +def _build_toc_node(docname, anchor="anchor", text="test text", bullet=False): + """Create the node structure that Sphinx expects for TOC Tree entries. + + The ``bullet`` argument wraps it in a ``nodes.bullet_list``, + which is how you nest TOC Tree entries. + """ + reference = nodes.reference( + "", + "", + internal=True, + refuri=docname, + anchorname="#" + anchor, + *[nodes.Text(text, text)], + ) + para = addnodes.compact_paragraph("", "", reference) + ret_list = nodes.list_item("", para) + return nodes.bullet_list("", ret_list) if bullet else ret_list + + +def _traverse_parent(node, tester): + """Traverse up the node's parents until you hit the ``objtypes`` referenced. + + node + Node to traverse. + objtypes: Callable[[object], bool]. + Type to find. + """ + curr_node = node.parent + while curr_node is not None: + if tester(curr_node): + return curr_node + curr_node = curr_node.parent + return None + + +def _find_toc_node(toc, ref_id, objtype): + """Find the actual TOC node for a ref_id. + + Depends on the object type: + * Section - First section (refuri) or 2nd+ level section (anchorname) + * Desc - Just use the anchor name + """ + for check_node in toc.traverse(nodes.reference): + if objtype == nodes.section and ( + check_node.attributes["refuri"] == ref_id + or check_node.attributes["anchorname"] == "#" + ref_id + ): + return check_node + if ( + objtype == addnodes.desc + and check_node.attributes["anchorname"] == "#" + ref_id + ): + return check_node + return None + + +def _get_toc_reference(node, toc, docname): + """Get reference from map from specific node to it's part of the toctree. + + It takes a specific incoming ``node``, + and returns the actual TOC Tree node that is said reference. 
+ """ + if isinstance(node, nodes.section) and isinstance(node.parent, nodes.document): + # Top Level Section header + ref_id = docname + toc_reference = _find_toc_node(toc, ref_id, nodes.section) + elif isinstance(node, nodes.section): + # Nested Section header + ref_id = node.attributes["ids"][0] + toc_reference = _find_toc_node(toc, ref_id, nodes.section) + else: + # Desc node + try: + ref_id = node.children[0].attributes["ids"][0] + toc_reference = _find_toc_node(toc, ref_id, addnodes.desc) + except (KeyError, IndexError): + LOGGER.warning( + "Invalid desc node", + exc_info=True, + type="autoapi", + subtype="toc_reference", + ) + toc_reference = None + + return toc_reference + + +def _check_key(key, env, first_run=True): + if key in env: + return env[key] + if not first_run and f"{key}.*" in env: + return env[f"{key}.*"] + if "." in key: + key, _ = key.rsplit(".", 1) + return _check_key(key, env, False) + return None + + +def add_domain_to_toctree(app, doctree, docname): + """Add domain objects to the toctree dynamically. + + This should be attached to the ``doctree-resolved`` event. + This works by: + + * Finding each domain node (addnodes.desc) + * Figuring out it's parent that will be in the toctree + (nodes.section, or a previously added addnodes.desc) + * Finding that parent in the TOC Tree based on it's ID + * Taking that element in the TOC Tree, + and finding it's parent that is a TOC Listing (nodes.bullet_list) + * Adding the new TOC element for our specific node as a child + of that nodes.bullet_list. + This checks that bullet_list's last child, + and checks that it is also a nodes.bullet_list, + effectively nesting it under that element + """ + toc = app.env.tocs[docname] + for desc_node in doctree.traverse(addnodes.desc): + try: + ref_id = desc_node.children[0].attributes["ids"][0] + except (KeyError, IndexError): + # autodoc-style directives already add nodes to the toc. + continue + if _check_key(ref_id, app.env.custom_toc) is False: + continue + + # This is the actual object that will exist in the TOC Tree + # Sections by default, and other Desc nodes that we've previously placed. + parent_node = _traverse_parent( + desc_node, lambda n: isinstance(n, (addnodes.desc, nodes.section)) + ) + if not parent_node: + continue + + toc_reference = _get_toc_reference(parent_node, toc, docname) + if not toc_reference: + continue + + # # Get the last child of our parent's bullet list, this is where "we" live. 
+ toc_insertion_point = _traverse_parent( + toc_reference, lambda n: isinstance(n.parent, nodes.bullet_list) + ) + + try: + # Python domain object + ref_text = desc_node[0].attributes["fullname"].split(".")[-1].split("(")[0] + except (KeyError, IndexError): + # Use `astext` for other types of domain objects + ref_text = desc_node[0].astext().split(".")[-1].split("(")[0] + + # Ensure we've added another bullet list so that we nest inside the parent, + # not next to it + if len(toc_insertion_point) > 1 and isinstance( + toc_insertion_point[1], nodes.bullet_list + ): + to_add = _build_toc_node(docname, anchor=ref_id, text=ref_text) + toc_insertion_point = toc_insertion_point[1] + else: + to_add = _build_toc_node( + docname, + anchor=ref_id, + text=ref_text, + bullet=True, + ) + + toc_insertion_point.append(to_add) + + +class _TocDirective(SphinxDirective): + has_content = False + + def run(self): + mod = self.env.ref_context.get("py:module") + obj, _ = self.env.temp_data.get("object", [None, None]) + + if not hasattr(self.env, "custom_toc"): + self.env.custom_toc = {} + + for key in self.fmt(mod=mod, obj=obj): + self.env.custom_toc[key] = self.include_in_toc + return [] + + +class _SingleTocDirective(_TocDirective): + include_in_toc = False + + def fmt(self, mod, obj): + return [f"{mod}.{obj}" if obj else mod] + + +class NoTocDirective(_SingleTocDirective): + """Directive to exclude object and its members from sidebar nav.""" + + include_in_toc = False + + +class ForceTocDirective(_SingleTocDirective): + """Directive to include object and its members into sidebar nav.""" + + include_in_toc = True + + +class _TocChildrenDirective(_TocDirective): + optional_arguments = 1000 + + def fmt(self, mod, obj): + if self.arguments: + return [ + (f"{mod}.{obj}.{arg}" if obj else f"{mod}.{arg}").strip(",") + for arg in self.arguments + ] + else: + return [f"{mod}.{obj}.*" if obj else f"{mod}.*"] + + +class NoTocChildrenDirective(_TocChildrenDirective): + """Directive to exclude object members from sidebar nav. + + May take any number of optional arguments - concrete members to exclude. + If no arguments are given, all members are excluded. + """ + + include_in_toc = False + + +class ForceTocChildrenDirective(_TocChildrenDirective): + """Directive to include object members into sidebar nav. + + May take any number of optional arguments - concrete members to include. + If no arguments are given, all members are included. 
+ """ + + include_in_toc = True + + +def setup(app): + """Set up this module as a sphinx extension.""" + app.add_directive("customtox-exclude", NoTocDirective) + app.add_directive("customtox-exclude-children", NoTocChildrenDirective) + app.add_directive("customtox-include", ForceTocDirective) + app.add_directive("customtox-include-children", ForceTocChildrenDirective) + + app.connect("doctree-resolved", add_domain_to_toctree) + + return {"version": sphinx.__display_version__, "parallel_read_safe": True} diff --git a/docs/source/ext/types_group.py b/docs/source/ext/types_group.py new file mode 100644 index 0000000..11d6757 --- /dev/null +++ b/docs/source/ext/types_group.py @@ -0,0 +1,44 @@ +"""Sphinx plugin to extract separate group with type aliases and validation helpers.""" +import sys + +import sphinx +from voluptuous import Schema + +if sys.version_info < (3, 10): + from typing_extensions import is_typeddict +else: + from typing import is_typeddict + + +def guess_group(app, what, name, obj, section, parent): + """Extract separate group with type aliases and validation helpers.""" + if ( + # We can use TypedDict for static validation + is_typeddict(obj) + # :mod:`voluptuous` for dynamic validation + or isinstance(obj, Schema) + # or declare type alias. + or obj.__class__.__module__ in {"typing", "typing_extensions"} + ): + return "Type or structure checkers" + + if "deprecated::" in (obj.__doc__ or ""): + return "Deprecated" + + +def skip_member(app, what, name, obj, skip, options): + """Keep documenting deprecated methods (they are not in __all__). + + As side effect this moves deprecated stuff to the end of module/class, + which is desired behaviour. + """ + if "deprecated::" in (obj.__doc__ or ""): + return False + + +def setup(app): + """Set up this module as a sphinx extension.""" + app.connect("autodocsumm-grouper", guess_group) + app.connect("autodoc-skip-member", skip_member) + + return {"version": sphinx.__display_version__, "parallel_read_safe": True} diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..4095d4c --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,27 @@ +.. thor_devkit documentation master file, created by + sphinx-quickstart on Thu May 5 15:32:02 2022. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to thor-devkit.py's documentation! +========================================== + +.. toctree:: + :maxdepth: 3 + :caption: Contents: + + installation + abi + rlp + transaction + others + references + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/source/installation.rst b/docs/source/installation.rst new file mode 100644 index 0000000..0ae1754 --- /dev/null +++ b/docs/source/installation.rst @@ -0,0 +1,26 @@ +Installation +============ + +You can install ``thor_devkit`` with ``pip``: + +.. code-block:: bash + + pip install thor-devkit -U + +.. warning:: + + `Bip32 library `__ depends on the ``ripemd160`` hash library, which should be present on your system (on Linux it is part of `openssl `__). + +Installing from source: + +.. code-block:: bash + + git clone https://github.com/vechain/thor_devkit.py + cd thor_devkit.py + pip install . + + +Supported extras: + +- ``test``: install developer requirements (``pip install thor-devkit[test]``). +- ``docs``: install ``sphinx``-related packages (``pip install thor-devkit[test,docs]``). 
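For a quick smoke test of the installation (a sketch; it assumes a source checkout and the ``test`` extra listed above), you can run the package's test suite the same way the CI workflow does:

.. code-block:: bash

    pip install .[test]
    pytest -vv -s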
diff --git a/docs/source/others.rst new file mode 100644 index 0000000..0e90a70 --- /dev/null +++ b/docs/source/others.rst @@ -0,0 +1,83 @@ +Useful models +============= + +:mod:`thor_devkit.cry.hdnode` +----------------------------- + +.. automodule:: thor_devkit.cry.hdnode + + +:mod:`thor_devkit.cry.keystore` +------------------------------- + +.. automodule:: thor_devkit.cry.keystore + :autosummary-private-members: + +:mod:`thor_devkit.certificate` +------------------------------ + +.. automodule:: thor_devkit.certificate + + +Various utilities and primitives +================================ + +:mod:`thor_devkit.cry.address` +------------------------------ + +.. automodule:: thor_devkit.cry.address + + +:mod:`thor_devkit.cry.mnemonic` +------------------------------- + +.. automodule:: thor_devkit.cry.mnemonic + + +:mod:`thor_devkit.bloom` +---------------------------- + +.. automodule:: thor_devkit.bloom + :special-members: __contains__ + + +Hash and cryptography utilities +=============================== + +:mod:`thor_devkit.cry.blake2b` +------------------------------ + +.. automodule:: thor_devkit.cry.blake2b + + +:mod:`thor_devkit.cry.keccak` +----------------------------- + +.. automodule:: thor_devkit.cry.keccak + + +:mod:`thor_devkit.cry.secp256k1` +-------------------------------- + +.. automodule:: thor_devkit.cry.secp256k1 + + +Implementation details +====================== + +:mod:`thor_devkit.exceptions` +----------------------------- + +.. automodule:: thor_devkit.exceptions + + +:mod:`thor_devkit.cry.utils` +----------------------------- + +.. automodule:: thor_devkit.cry.utils + + +:mod:`thor_devkit.validation` +----------------------------- + +.. automodule:: thor_devkit.validation diff --git a/docs/source/references.rst new file mode 100644 index 0000000..b05a1d2 --- /dev/null +++ b/docs/source/references.rst @@ -0,0 +1,5 @@ +Useful references +================= + +* `Catalog of Bitcoin Improvement Proposals `_ +* `Catalog of VeChain Enhancement Proposals `_ diff --git a/docs/source/rlp.rst new file mode 100644 index 0000000..be38fe5 --- /dev/null +++ b/docs/source/rlp.rst @@ -0,0 +1,145 @@ +RLP encoding +============ + +RLP (**r**\ ecursive **l**\ ength **p**\ refix) is a common algorithm for the encoding +of variable-length binary data. Data is RLP-encoded before it is stored on disk +or transmitted over a network. + +Theory +------ + +Encoding +******** + +Primary RLP can only deal with the "item" type, which is defined as: + +#. A byte string (:class:`bytes` or :class:`bytearray` in Python), or +#. A sequence of items (usually a :class:`list`). + +Some examples are: + +* ``b'\x00\xff'`` +* empty list ``[]`` +* list of bytes ``[b'\x00', b'\x01\x03']`` +* list of combinations ``[[], b'\x00', [b'\x00']]`` + +The encoded result is always a byte string: + +.. graphviz:: + :caption: RLP encoding diagram + :alt: RLP encoding diagram + :align: center + + digraph RLP_basic { + rankdir="LR"; + item [shape="box", label="Item"]; + rlp [shape="box", label="RLP"]; + item -> rlp [label="Encoding"]; + } + +Encoding algorithm +****************** + +Given an item ``x`` as input, we define ``rlp_encode`` by the following algorithm: + + Let ``concat`` be a function that joins the given byte strings into a single byte sequence. + + #. If ``x`` is a single byte and ``0x00 <= x <= 0x7F``, ``rlp_encode(x) = x``. + + #. 
Otherwise, if ``x`` is a byte string, let ``len(x)`` be the length of ``x`` in bytes + and define the encoding as follows: + + * If ``0 <= len(x) < 0x38`` + (note that the empty byte string fulfills this requirement, as well as ``b'\x80'``):: + + rlp_encode(x) = concat(0x80 + len(x), x) + + In this case the first byte is in the range ``[0x80; 0xB7]``. + + * If ``0x38 <= len(x) <= 0xFFFFFFFFFFFFFFFF``:: + + rlp_encode(x) = concat(0xB7 + len(len(x)), len(x), x) + + In this case the first byte is in the range ``[0xB8; 0xBF]``. + + * For longer strings the encoding is ``undefined``. + + #. Otherwise, if ``x`` is a list, let ``s = concat(map(rlp_encode, x))`` + be the concatenation of the RLP encodings of all its items. + + * If ``0 <= len(s) < 0x38`` (note that the empty list matches):: + + rlp_encode(x) = concat(0xC0 + len(s), s) + + In this case the first byte is in the range ``[0xC0; 0xF7]``. + + * If ``0x38 <= len(s) <= 0xFFFFFFFFFFFFFFFF``:: + + rlp_encode(x) = concat(0xF7 + len(len(s)), len(s), s) + + In this case the first byte is in the range ``[0xF8; 0xFF]``. + + * For longer lists the encoding is ``undefined``. + +See more in the `Ethereum wiki `__. + +Encoding examples +***************** + +.. table:: Encoding examples + :width: 100% + + +-------------------+--------------------------------+ + | ``x`` | ``rlp_encode(x)`` | + +===================+================================+ + | ``b''`` | ``0x80`` | + +-------------------+--------------------------------+ + | ``b'\x00'`` | ``0x00`` | + +-------------------+--------------------------------+ + | ``b'\x0F'`` | ``0x0F`` | + +-------------------+--------------------------------+ + | ``b'\x79'`` | ``0x79`` | + +-------------------+--------------------------------+ + | ``b'\x80'`` | ``0x81 0x80`` | + +-------------------+--------------------------------+ + | ``b'\xFF'`` | ``0x81 0xFF`` | + +-------------------+--------------------------------+ + | ``b'foo'`` | ``0x83 0x66 0x6F 0x6F`` | + +-------------------+--------------------------------+ + | ``[]`` | ``0xC0`` | + +-------------------+--------------------------------+ + | ``[b'\x0F']`` | ``0xC1 0x0F`` | + +-------------------+--------------------------------+ + | ``[b'\xEF']`` | ``0xC1 0x81 0xEF`` | + +-------------------+--------------------------------+ + | ``[[], [[]]]`` | ``0xC3 0xC0 0xC1 0xC0`` | + +-------------------+--------------------------------+ + + +Serialization +************* + +However, in the real world, the inputs are neither pure bytes nor lists. +Some are complex key-value mappings like :class:`dict`. +Some are numbers in the ``"0x123"`` string form. + +This module provides a set of pre-defined conversions for such objects, called *serialization*: + +.. graphviz:: + :caption: Actual RLP encoding diagram + :alt: Actual RLP encoding diagram + :align: center + + digraph RLP_basic { + rankdir="LR"; + item [shape="box", label="Item"]; + obj [shape="box", label="Real world\nobject"] + rlp [shape="box", label="RLP"]; + obj -> item [label="Serialization"] + item -> rlp [label="Encoding"]; + } + +API documentation +----------------- + +.. automodule:: thor_devkit.rlp diff --git a/docs/source/transaction.rst new file mode 100644 index 0000000..0011b7f --- /dev/null +++ b/docs/source/transaction.rst @@ -0,0 +1,5 @@ +Transactions +============ + +.. 
automodule:: thor_devkit.transaction + :autosummary-no-nesting: diff --git a/py.typed b/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..a65d0f7 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,201 @@ +[build-system] +requires = [ + 'setuptools;python_version>="3.7"', + # python3.6 is legacy and doesn't support pyproject.toml properly. + # Use a backport. + 'ppsetuptools;python_version<"3.7"', + # toml is in standard library in 3.11+ + 'toml>=0.10.1;python_version<"3.11"', +] +build-backend = "setuptools.build_meta" + +[project] +dynamic = ["version"] +name = "thor-devkit" +authors = [{name = "laalaguer", email = "laalaguer@gmail.com"}] +readme = "README.md" +description = "SDK to interact with VeChain Thor public blockchain." +license = { file = "LICENSE" } +classifiers = [ + "Programming Language :: Python :: 3", + "Intended Audience :: Developers", + "Topic :: Software Development :: Build Tools", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Operating System :: OS Independent", +] +keywords = ["vechain", "thor", "blockchain", "sdk"] +dependencies = [ + 'bip-utils>=1.0.5,<3.0.0', + "ecdsa>=0.16.1,<0.18.0", + 'eth-abi>=2.1.1,<3.1.0', + 'eth-keyfile>=0.5.0,<0.7.0', + 'eth-keys>=0.3.3,<0.5.0', + 'eth-utils>=1.2.0,<2.1.0', + "mnemonic>=0.19,<=0.21", + "pysha3>=1.0.2,<1.1.0", + "py-solc-x>=1.0,<1.2", + 'rlp>=1.2.0,<3.1.0', + "typing_extensions>=4.1.0,<5.0.0", + "twine", + "voluptuous>=0.12.0,<0.14.0", +] +requires-python = ">=3.6.2" + +[tool.setuptools.dynamic] +version = {attr = "thor_devkit.VERSION"} + +[project.optional-dependencies] +test = [ + 'mypy>=0.942', + 'pytest>=6.4.0', + "pytest-cov", + "pytest-mock", + "tox", +] +docs = [ + 'docutils>=0.14,<0.18', # Sphinx haven't upgraded yet + "sphinx>=4.5.0,<5.0.0", + "sphinx-rtd-theme", + "autodocsumm>=0.2.8,<0.3.0", +] + +[project.urls] +Home = "https://github.com/laalaguer/thor-devkit.py" +Documentation = "https://github.com/laalaguer/thor-devkit.py" # FIXME: rtd +Source = "https://github.com/laalaguer/thor-devkit.py" +Issues = "https://github.com/laalaguer/thor-devkit.py/issues" + + +[tool.pytest.ini_options] +addopts = """ + --cov=thor_devkit + --no-cov-on-fail + --cov-report=term-missing + --cov-branch + --doctest-modules + --doctest-continue-on-failure + --ignore=docs +""" + + +[tool.coverage.run] +omit = [ + "tests/*", + "thor_devkit/deprecation.py", +] + +[tool.coverage.report] +exclude_lines = [ + # Explicitly ignored + "pragma: no cover", + # Often used in abstract classes + "raise NotImplementedError", + # Debug code + 'if self\.debug:', + "def __repr__", + # Scripts entrypoints + "if __name__ == .__main__.:", + # Should never run + '@(abc\.)?abstractmethod', + # Deprecated stuff + "@deprecated_to_property", + "@renamed_function", + "@renamed_method", + "@renamed_class", + "@deprecated", + # Typing artifact, that has no implementation + "@overload", + # Typing artifact, False at runtime + 'if (typing\.)?TYPE_CHECKING:' +] + + +[tool.mypy] +allow_redefinition = true +check_untyped_defs = true +ignore_missing_imports = true +incremental = true +strict_optional = true +no_implicit_optional = true +show_traceback = true +warn_unused_ignores = true +warn_redundant_casts = true +warn_unused_configs = true +warn_unreachable = true +exclude = [ + "docs/*", # No plans to typecheck sphinx plugins +] + +# Enforce stricter validation for library code +[[tool.mypy.overrides]] +module = "thor_devkit.*" +disallow_untyped_defs = true 
+disallow_incomplete_defs = true +disallow_any_generics = true +warn_no_return = true + + +[tool.flake8] +exclude = """ + .git, + .github, + __pycache__, + .pytest_cache, + .env, + env, + .pyenv, + pyenv, + *.egg_info, +""" +max_line_length = 88 +extend_ignore = [ + "SIM905", # Allow statement "hello world".split() instead of list literal + "N806", # Allow non-"lower_underscore" variables (it's too stupid rule) + "PIE798", # Allow class with only static methods for namespacing + "D105", # Magic methods may remain unannotated + "D401", # Imperative mood of first docstring line is not always encouraged + "RST306", # Plugin can't resolve links defined in other docstrings. + "RST304", # No builtin roles, so too much to do manually. + "E203", # Invalid. Expressions like `[len(x) :]` conform with PEP8, but raise this. +] +# Ignore +per-file-ignores = [ + "tests/*:D,RST", # We don't care about docstrings in tests. +] +max-complexity = 12 + +# Docstring validation +rst-directives = [ + "versionadded", + "versionchanged", + "deprecated", + "code-block", + # autodocsumm plugin + "autoclasssumm", + # From custom plugin + "customtox-exclude", + "customtox-exclude-children", + "customtox-include", + "customtox-include-children", +] +docstring-convention = "numpy" + + +[tool.isort] +profile = "black" + + +[tool.tox] +legacy_tox_ini = """ +[tox] +envlist = clean,py36,py37,py38,py39,py310 + +[testenv] +deps = .[test] +commands = pytest --cov-append + +[testenv:clean] +deps = coverage[toml] +skip_install = true +commands = coverage erase +""" diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index a9f59c7..0000000 --- a/requirements.txt +++ /dev/null @@ -1,13 +0,0 @@ -pysha3==1.0.2 -pytest>=6.2.0 -pytest-cov -ecdsa==0.16.1 -eth-keys==0.3.3 -mnemonic==0.19 -bip-utils==1.0.5 -eth-keyfile==0.5.1 -rlp==1.2.0 -eth-abi==2.1.1 -voluptuous==0.12.0 -wheel -twine diff --git a/setup.py b/setup.py index d32121d..f62ed39 100644 --- a/setup.py +++ b/setup.py @@ -1,33 +1,11 @@ -import setuptools +"""Required only to allow editable installs.""" +import sys -long_description = '' -with open("README.md", "r") as fh: - long_description = fh.read() -assert long_description +if sys.version_info < (3, 7): + import ppsetuptools -setuptools.setup( - name="thor-devkit", - version="1.0.12", - author="laalaguer", - author_email="laalaguer@gmail.com", - description="SDK to interact with VeChain Thor public blockchain.", - keywords="vechain thor blockchain sdk", - long_description=long_description, - long_description_content_type="text/markdown", - classifiers=[ - "Programming Language :: Python :: 3", - 'Intended Audience :: Developers', - 'Topic :: Software Development :: Build Tools', - "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", - "Operating System :: OS Independent", - ], - url="https://github.com/laalaguer/thor-devkit.py", - project_urls={ - 'Documentation': 'https://github.com/laalaguer/thor-devkit.py', - 'Source': 'https://github.com/laalaguer/thor-devkit.py', - 'Issue Tracker': 'https://github.com/laalaguer/thor-devkit.py/issues', - }, - python_requires='>=3.6', - install_requires=[x.strip() for x in open('requirements.txt')], - packages=setuptools.find_packages(), -) \ No newline at end of file + ppsetuptools.setup() +else: + import setuptools + + setuptools.setup() diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..d75e5e5 --- /dev/null +++ 
b/tests/conftest.py @@ -0,0 +1,6 @@ +import pytest + + +@pytest.fixture() +def dyn_prefix(): + return bytes.fromhex("20".rjust(64, "0")) diff --git a/tests/test_abi.py b/tests/test_abi.py deleted file mode 100644 index f6955e4..0000000 --- a/tests/test_abi.py +++ /dev/null @@ -1,381 +0,0 @@ -import pytest -from thor_devkit import abi -from thor_devkit import cry - -f1 = abi.FUNCTION({ - "constant": False, - "inputs": [ - { - "name": "a1", - "type": "uint256" - }, - { - "name": "a2", - "type": "string" - } - ], - "name": "f1", - "outputs": [ - { - "name": "r1", - "type": "address" - }, - { - "name": "r2", - "type": "bytes" - } - ], - "payable": False, - "stateMutability": "nonpayable", - "type": "function" -}) - - -f2 = abi.FUNCTION({ - "inputs": [], - "name": "nodes", - "payable": False, - "outputs": [ - { - "components": [ - { - "internalType": "address", - "name": "master", - "type": "address" - }, - { - "internalType": "address", - "name": "endorsor", - "type": "address" - }, - { - "internalType": "bytes32", - "name": "identity", - "type": "bytes32" - }, - { - "internalType": "bool", - "name": "active", - "type": "bool" - } - ], - "internalType": "struct AuthorityUtils.Candidate[]", - "name": "list", - "type": "tuple[]" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }) - -# Solidity -# function getStr() public pure returns (string memory) { -# return "Hello World!"; -# } - -f3 = abi.FUNCTION({ - "inputs": [], - "name": "getStr", - "outputs": [ - { - "internalType": "string", - "name": "", - "type": "string" - } - ], - "stateMutability": "pure", - "type": "function" -}) - -# Solidity -# function getBool() public pure returns (bool) { -# return true; -# } -f4 = abi.FUNCTION( - { - "inputs": [], - "name": "getBool", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "pure", - "type": "function" - } -) - -# function getBigNumbers() public pure returns (uint256 a, int256 b) { -# return (123456, -123456); -# } -f5 = abi.FUNCTION( - { - "inputs": [], - "name": "getBigNumbers", - "outputs": [ - { - "internalType": "uint256", - "name": "a", - "type": "uint256" - }, - { - "internalType": "int256", - "name": "b", - "type": "int256" - } - ], - "stateMutability": "pure", - "type": "function" - } -) - - -e1 = abi.EVENT({ - "anonymous": False, - "inputs": [ - { - "indexed": True, - "name": "a1", - "type": "uint256" - }, - { - "indexed": False, - "name": "a2", - "type": "string" - } - ], - "name": "E1", - "type": "event" -}) - - -e2 = abi.EVENT({ - "anonymous": True, - "inputs": [ - { - "indexed": True, - "name": "a1", - "type": "uint256" - }, - { - "indexed": False, - "name": "a2", - "type": "string" - } - ], - "name": "E2", - "type": "event" -}) - - -e3 = abi.EVENT({ - "anonymous": False, - "inputs": [ - { - "indexed": True, - "name": "a1", - "type": "uint256" - } - ], - "name": "E3", - "type": "event" -}) - - -e4 = abi.EVENT({ - "inputs": [ - { - "indexed": True, - "name": "a1", - "type": "string" - } - ], - "name": "E4", - "type": "event" -}) - -def test_coder(): - assert abi.Coder.encode_single( - 'uint256', - 2345675643 - ).hex() == '000000000000000000000000000000000000000000000000000000008bd02b7b' - - with pytest.raises(Exception): - abi.Coder.encode_single('bytes32', '0xdf3234') - - assert abi.Coder.encode_single( - 'bytes32', - bytes.fromhex('df32340000000000000000000000000000000000000000000000000000000000') - ).hex() == 'df32340000000000000000000000000000000000000000000000000000000000' - - assert abi.Coder.encode_list( 
- ['bytes'], - [ - bytes.fromhex('df3234') - ] - ).hex() == '00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000003df32340000000000000000000000000000000000000000000000000000000000' - - assert abi.Coder.encode_list( - ['bytes32[]'], - [ - [ - bytes.fromhex('df32340000000000000000000000000000000000000000000000000000000000'), - bytes.fromhex('fdfd000000000000000000000000000000000000000000000000000000000000') - ] - ] - ).hex() == '00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002df32340000000000000000000000000000000000000000000000000000000000fdfd000000000000000000000000000000000000000000000000000000000000' - - assert abi.Coder.decode_single( - 'uint256', - bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000010') - ) == 16 - - assert abi.Coder.decode_single( - 'string', - bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000848656c6c6f212521000000000000000000000000000000000000000000000000') - ) == "Hello!%!" - - -def test_function(): - f = abi.Function(f1) - assert f.selector.hex() == '27fcbb2f' - assert f.get_selector().hex() == '27fcbb2f' - assert f.get_name() == 'f1' - - assert f.encode([1, 'foo'], to_hex=True) == '0x27fcbb2f000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000003666f6f0000000000000000000000000000000000000000000000000000000000' - - expected = { - "0": '0xabc0000000000000000000000000000000000001', - "1": bytes.fromhex('666f6f'), - "r1": '0xabc0000000000000000000000000000000000001', - "r2": bytes.fromhex('666f6f') - } - assert expected == f.decode(bytes.fromhex('000000000000000000000000abc000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000003666f6f0000000000000000000000000000000000000000000000000000000000')) - - -def test_string(): - f = abi.Function(f3) - assert f.selector.hex() == 'b8c9e4ed' - assert f.get_selector().hex() == 'b8c9e4ed' - assert f.get_name() == 'getStr' - - expected = { - "0": "Hello World!" 
- } - assert expected == f.decode(bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000c48656c6c6f20576f726c64210000000000000000000000000000000000000000')) - - -def test_bool(): - f = abi.Function(f4) - assert f.selector.hex() == '12a7b914' - assert f.get_selector().hex() == '12a7b914' - assert f.get_name() == 'getBool' - - expected = { - "0": True - } - - assert expected == f.decode(bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000001')) - -def test_big_number(): - f = abi.Function(f5) - assert f.selector.hex() == 'ff0d6c7d' - assert f.get_selector().hex() == 'ff0d6c7d' - assert f.get_name() == 'getBigNumbers' - - expected = { - "0": 123456, - "1": -123456, - "a": 123456, - "b": -123456 - } - - assert expected == f.decode(bytes.fromhex('000000000000000000000000000000000000000000000000000000000001e240fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe1dc0')) - -# def test_abiv2(): -# f = abi.Function(f2) - -# output_hex = '000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000060000000000000000000000006935455ef590eb8746f5230981d09d3552398018000000000000000000000000b5358b034647202d0cd3d1bf615e63e498e0268249984a53f9397370079bba8d95f5c15c743098fb318483e0cb6bbf46ec89ccfb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000005ff66ee3a3ea2aba2857ea8276edb6190d9a1661000000000000000000000000d51666c6b4fed6070a78691f1f3c8e79ad02e3a076f090d383f49d8faab2eb151241528a552f0ae645f460360a7635b8883987a60000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c5a02c1eac7516a9275d86c1cb39a5262b8684a4000000000000000000000000e32499b4143830f2526c79d388ecee530b6357aac635894a50ce5c74c62d238dbe95bd6a0fa076029d913d76b0d0b111c538153f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e8fd586e022f825a109848832d7e552132bc332000000000000000000000000224626926a7a12225a60e127cec119c939db4a5cdbf2712e19af00dc4d376728f7cb06cc215c8e7c53b94cb47cefb4a26ada2a6c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ea2e8c9d6dcad9e4be4f1c88a3befb8ea742832e0000000000000000000000001a011475baa1d368fa2d8328a1b7a8d848b62c94c68dc811199d40ff7ecd8c8d46454ad9ac5f5cde9bae32f927fec10d82dbdf7800000000000000000000000000000000000000000000000000000000000000000000000000000000000000004977d68df97bb313b23238520580d8d3a59939bf0000000000000000000000007ad1d568b3fe5bad3fc264aca70bc7bcd5e4a6ff83b137cf7e30864b8a4e56453eb1f094b4434685d86895de38ac2edcf5d3f5340000000000000000000000000000000000000000000000000000000000000000' - -# decoded = f.decode(bytes.fromhex(output_hex)) - - -def test_event(): - e = abi.Event(e1) - assert e.signature.hex() == '47b78f0ec63d97830ace2babb45e6271b15a678528e901a9651e45b65105e6c2' - assert e.get_signature().hex() == '47b78f0ec63d97830ace2babb45e6271b15a678528e901a9651e45b65105e6c2' - - assert e.decode( - bytes.fromhex('00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000003666f6f0000000000000000000000000000000000000000000000000000000000'), - [ - bytes.fromhex('47b78f0ec63d97830ace2babb45e6271b15a678528e901a9651e45b65105e6c2'), - bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000001') - ] - ) == { "0": 1, "1": "foo", "a1": 1, "a2": "foo" } - - assert e.encode({ - 'a1': None, - }) == [ - 
bytes.fromhex('47b78f0ec63d97830ace2babb45e6271b15a678528e901a9651e45b65105e6c2'), - None - ] - - assert e.encode({ - 'a1': 1 - }) == [ - bytes.fromhex('47b78f0ec63d97830ace2babb45e6271b15a678528e901a9651e45b65105e6c2'), - bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000001') - ] - - with pytest.raises(ValueError): - assert e.encode({ - 'a1': 1, - 'x': 3 - }) == [ - bytes.fromhex('47b78f0ec63d97830ace2babb45e6271b15a678528e901a9651e45b65105e6c2'), - bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000001') - ] - - ee = abi.Event(e2) - assert ee.decode( - bytes.fromhex('00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000003666f6f0000000000000000000000000000000000000000000000000000000000'), - [ - bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000001') - ] - ) == { "0": 1, "1": "foo", "a1": 1, "a2": "foo" } - - assert ee.encode({ - 'a1': 1 - }) == [ - bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000001') - ] - - assert ee.encode([1]) == [ - bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000001') - ] - - eee = abi.Event(e3) - assert eee.encode({ - 'a1': 1 - }) == [ - bytes.fromhex('e96585649d926cc4f5031a6113d7494d766198c0ac68b04eb93207460f9d7fd2'), - bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000001') - ] - - assert eee.decode( - bytes.fromhex('00'), - [ - bytes.fromhex('e96585649d926cc4f5031a6113d7494d766198c0ac68b04eb93207460f9d7fd2'), - bytes.fromhex('0000000000000000000000000000000000000000000000000000000000000001') - ] - ) == { "0": 1, "a1": 1 } - - eeee = abi.Event(e4) - assert eeee.encode({ - 'a1': 'hello' - }) == [ - eeee.signature, - cry.keccak256(['hello'.encode('utf-8')])[0] - ] \ No newline at end of file diff --git a/tests/test_abi_coder.py b/tests/test_abi_coder.py new file mode 100644 index 0000000..cd73383 --- /dev/null +++ b/tests/test_abi_coder.py @@ -0,0 +1,141 @@ +import pytest +from eth_abi.exceptions import EncodingTypeError + +from thor_devkit import abi + +BYTES = ["df3234", "aa0033", "123450", "02aaaa", "00aaaa", "aaaa00"] + + +@pytest.fixture(params=BYTES) +def bytes_3(request): + return bytes.fromhex(request.param) + + +@pytest.fixture() +def bytes_32(bytes_3): + return bytes.fromhex(bytes_3.hex().ljust(64, "0")) + + +@pytest.fixture(params=BYTES) +def bytes_3_2(request): + return bytes.fromhex(request.param) + + +@pytest.fixture() +def bytes_32_2(bytes_3_2): + return bytes.fromhex(bytes_3_2.hex().ljust(64, "0")) + + +@pytest.fixture(params=[0, 1, 2, 2345675643, 2**256 - 1]) +def int_256(request): + return request.param + + +@pytest.fixture() +def int_256_enc(int_256): + return bytes.fromhex(hex(int_256)[2:].rjust(64, "0")) + + +# Note that "\u0404" (euro sign AFAIR) is 2 bytes long. 
We *must* support unicode, +# so this edge-case is important +@pytest.fixture(params=["", "foo", "Hello, beautiful world!", "\u0404"]) +def string(request): + return request.param + + +@pytest.fixture() +def string_enc(string): + enc = string.encode() + return bytes.fromhex(hex(len(enc))[2:].rjust(64, "0") + enc.hex().ljust(64, "0")) + + +@pytest.fixture() +def bytes_32_array(bytes_32, bytes_32_2): + return [bytes_32, bytes_32_2] + + +@pytest.fixture() +def bytes_32_dynarray_enc(bytes_32_array, dyn_prefix): + return b"".join( + [ + dyn_prefix, + bytes.fromhex(hex(len(bytes_32_array))[2:].rjust(64, "0")), + *bytes_32_array, + ] + ) + + +def test_bytes_fixed_coder(bytes_3, bytes_32, dyn_prefix): + with pytest.raises(EncodingTypeError): + abi.Coder.encode_single("bytes32", "0x" + bytes_3.hex()) + + assert abi.Coder.encode_single("bytes32", bytes_32).hex() == bytes_32.hex() + assert abi.Coder.decode_single("bytes32", bytes_32).hex() == bytes_32.hex() + + +def test_bytes_dynamic_coder(bytes_3, dyn_prefix): + assert ( + # Without exact length it's ok + abi.Coder.encode_list(["bytes"], [bytes_3]).hex() + == dyn_prefix.hex() + "3".rjust(64, "0") + bytes_3.hex().ljust(64, "0") + ) + + +def test_bytes_dynarray_coder(bytes_32_array, bytes_32_dynarray_enc): + assert ( + abi.Coder.encode_list(["bytes32[]"], [bytes_32_array]).hex() + == bytes_32_dynarray_enc.hex() + ) + assert ( + abi.Coder.encode_single("bytes32[]", bytes_32_array).hex() + == bytes_32_dynarray_enc.hex() + ) + + # Arrays are decoded as tuples + assert abi.Coder.decode_list(["bytes32[]"], bytes_32_dynarray_enc) == [ + tuple(bytes_32_array) + ] + assert abi.Coder.decode_single("bytes32[]", bytes_32_dynarray_enc) == tuple( + bytes_32_array + ) + + +def test_bytes_fixarray_coder(bytes_32_array): + assert ( + abi.Coder.encode_list(["bytes32[2]"], [bytes_32_array]).hex() + == b"".join(bytes_32_array).hex() + ) + assert ( + abi.Coder.encode_single("bytes32[2]", bytes_32_array).hex() + == b"".join(bytes_32_array).hex() + ) + + # Arrays are decoded as tuples + assert abi.Coder.decode_list(["bytes32[2]"], b"".join(bytes_32_array)) == [ + tuple(bytes_32_array) + ] + assert abi.Coder.decode_single("bytes32[2]", b"".join(bytes_32_array)) == ( + tuple(bytes_32_array) + ) + + +def test_int_coder(int_256, int_256_enc): + assert abi.Coder.encode_list(["uint256"], [int_256]).hex() == int_256_enc.hex() + assert abi.Coder.encode_single("uint256", int_256).hex() == int_256_enc.hex() + + assert abi.Coder.decode_list(["uint256"], int_256_enc) == [int_256] + assert abi.Coder.decode_single("uint256", int_256_enc) == int_256 + + +def test_string_coder(string, string_enc, dyn_prefix): + assert ( + abi.Coder.encode_single("string", string).hex() + == dyn_prefix.hex() + string_enc.hex() + ) + assert abi.Coder.decode_single("string", dyn_prefix + string_enc) == string + + assert ( + abi.Coder.encode_list(["string"], [string]).hex() + == dyn_prefix.hex() + string_enc.hex() + ) + assert abi.Coder.decode_list(["string"], dyn_prefix + string_enc) == [string] diff --git a/tests/test_abi_events.py b/tests/test_abi_events.py new file mode 100644 index 0000000..836b611 --- /dev/null +++ b/tests/test_abi_events.py @@ -0,0 +1,984 @@ +import os +from tempfile import mkstemp + +import pytest +from solcx.exceptions import SolcError + +from thor_devkit import cry +from thor_devkit.abi import EVENT, Coder, Event + + +# *********************** FIXTURES ************************** +@pytest.fixture() +def simple_event_no_hash(): + return EVENT( + { + "anonymous": False, + "inputs": [ 
+ {"indexed": True, "name": "a1", "type": "uint256"}, + {"indexed": False, "name": "a2", "type": "string"}, + ], + "name": "E1", + "type": "event", + } + ) + + +@pytest.fixture() +def anonymous_event_no_hash(): + return EVENT( + { + "anonymous": True, + "inputs": [ + { + "indexed": True, + "name": "a1", + "type": "uint256", + "internalType": "uint256", + }, + { + "indexed": False, + "name": "a2", + "type": "string", + "internalType": "string", + }, + ], + "name": "E2", + "type": "event", + } + ) + + +@pytest.fixture() +def simple_event_hash(): + return EVENT( + { + "inputs": [{"indexed": True, "name": "a1", "type": "string"}], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def simple_event_int(): + return EVENT( + { + "anonymous": False, + "inputs": [{"indexed": True, "name": "a1", "type": "uint256"}], + "name": "E3", + "type": "event", + } + ) + + +@pytest.fixture() +def tuple_event(): + return EVENT( + { + "inputs": [ + { + "indexed": True, + "name": "a1", + "type": "tuple", + "components": [ + {"name": "b1", "type": "string"}, + {"name": "b2", "type": "string"}, + {"name": "b3", "type": "uint8"}, + ], + }, + {"indexed": True, "name": "a2", "type": "bytes"}, + {"indexed": False, "name": "a3", "type": "bool"}, + ], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def tuple_fixed_array_event(): + return EVENT( + { + "inputs": [ + { + "indexed": True, + "name": "a1", + "type": "tuple[3]", + "components": [ + {"name": "b1", "type": "string"}, + {"name": "b2", "type": "string"}, + {"name": "b3", "type": "uint8"}, + ], + }, + {"indexed": True, "name": "a2", "type": "string"}, + {"indexed": False, "name": "a3", "type": "bool"}, + ], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def tuple_dynamic_array_event(): + return EVENT( + { + "inputs": [ + { + "indexed": True, + "name": "a1", + "type": "tuple[]", + "components": [ + {"name": "b1", "type": "string"}, + {"name": "b2", "type": "string"}, + {"name": "b3", "type": "uint8"}, + ], + }, + {"indexed": True, "name": "a2", "type": "string"}, + {"indexed": False, "name": "a3", "type": "bool"}, + ], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def fixed_array_event(): + return EVENT( + { + "inputs": [ + {"indexed": True, "name": "a1", "type": "int16[3]"}, + ], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def dynamic_array_event(): + return EVENT( + { + "inputs": [ + {"indexed": True, "name": "a1", "type": "int16[]"}, + ], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def unindexed_struct_event(): + return EVENT( + { + "inputs": [ + { + "indexed": False, + "name": "a1", + "type": "tuple", + "components": [ + {"name": "b1", "type": "bool"}, + {"name": "b2", "type": "string"}, + ], + }, + ], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def unindexed_struct_fixed_array_event(): + return EVENT( + { + "inputs": [ + { + "indexed": False, + "name": "a1", + "type": "tuple[3]", + "components": [ + {"name": "b1", "type": "bool"}, + {"name": "b2", "type": "string"}, + ], + }, + ], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def unindexed_struct_dynamic_array_event(): + return EVENT( + { + "inputs": [ + { + "indexed": False, + "name": "a1", + "type": "tuple[]", + "components": [ + {"name": "b1", "type": "bool"}, + {"name": "b2", "type": "string"}, + ], + }, + ], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def unindexed_struct_nested_event(): + return EVENT( + { + "inputs": [ 
+ { + "indexed": False, + "name": "a1", + "type": "tuple[]", + "components": [ + { + "name": "b1", + "type": "tuple", + "components": [ + {"name": "c1", "type": "bool"}, + {"name": "c2", "type": "bool"}, + ], + }, + {"name": "b2", "type": "string"}, + ], + }, + ], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def only_unindexed_event(): + return EVENT( + { + "inputs": [ + {"indexed": False, "name": "a1", "type": "int16"}, + {"indexed": False, "name": "a2", "type": "string"}, + {"indexed": False, "name": "a3", "type": "bool[3]"}, + {"indexed": False, "name": "a4", "type": "bool[]"}, + ], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def mixed_indexed_unindexed_event(): + return EVENT( + { + "inputs": [ + {"indexed": False, "name": "a1", "type": "int16"}, + {"indexed": False, "name": "a2", "type": "string"}, + {"indexed": False, "name": "a3", "type": "bool[3]"}, + {"indexed": False, "name": "a4", "type": "bool[]"}, + {"indexed": True, "name": "b1", "type": "bool"}, + {"indexed": True, "name": "b2", "type": "bool"}, + {"indexed": True, "name": "b3", "type": "bytes"}, + ], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def too_much_indexed_event(): + return EVENT( + { + "inputs": [ + {"indexed": True, "name": "a1", "type": "bool"}, + {"indexed": True, "name": "a2", "type": "bool"}, + {"indexed": True, "name": "a3", "type": "bool"}, + {"indexed": True, "name": "a4", "type": "bool"}, + ], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def too_much_indexed_anon_event(): + return EVENT( + { + "inputs": [ + {"indexed": True, "name": "a1", "type": "bool"}, + {"indexed": True, "name": "a2", "type": "bool"}, + {"indexed": True, "name": "a3", "type": "bool"}, + {"indexed": True, "name": "a4", "type": "bool"}, + {"indexed": True, "name": "a5", "type": "bool"}, + ], + "name": "E4", + "type": "event", + } + ) + + +@pytest.fixture() +def multi_inputs_event(): + return EVENT( + { + "inputs": [ + {"name": "from", "indexed": True, "type": "address"}, + {"name": "value", "indexed": False, "type": "uint256"}, + {"name": "to", "indexed": True, "type": "address"}, + {"name": "value2", "indexed": False, "type": "uint64"}, + ], + "name": "MyEvent", + "type": "event", + } + ) + + +@pytest.fixture() +def foo_enc(): + return bytes.fromhex( + "20".rjust(64, "0") + "3".rjust(64, "0") + "666f6f".ljust(64, "0") + ) + + +@pytest.fixture() +def true_enc(): + return b"\x01".rjust(32, b"\x00") + + +# *********************************************************** + + +def test_event_basic_decode(simple_event_no_hash, foo_enc, true_enc): + e = Event(simple_event_no_hash) + + assert ( + e.signature.hex() + == "47b78f0ec63d97830ace2babb45e6271b15a678528e901a9651e45b65105e6c2" + ) + + assert e.decode(foo_enc, [e.signature, true_enc]).to_dict() == { + "a1": 1, + "a2": "foo", + } + + with pytest.raises(ValueError, match=r"First topic.+ signature"): + e.decode(foo_enc, [b"\x00", true_enc]) + + with pytest.raises(ValueError, match=r"Invalid topics count"): + e.decode(foo_enc, [e.signature]) + with pytest.raises(ValueError, match=r"Invalid topics count"): + e.decode(foo_enc, [e.signature, true_enc, true_enc]) + + assert e.decode(foo_enc, [e.signature, None]).to_dict() == { + "a1": None, + "a2": "foo", + } + with pytest.raises(ValueError, match=r"Invalid topics count"): + e.decode(foo_enc, [e.signature, true_enc, true_enc]) + + assert e.decode(foo_enc, None).to_dict() == {"a1": None, "a2": "foo"} + + +def test_event_basic_encode(simple_event_no_hash, 
true_enc): + e = Event(simple_event_no_hash) + + assert e.encode({"a1": None}) == [e.signature, None] + assert e.encode({"a1": 1}) == [e.signature, true_enc] + + with pytest.raises(ValueError, match=r".+ expected 1, got 2"): + e.encode({"a1": 1, "x": 3}) + + with pytest.raises(ValueError, match=r"Missing key.+"): + e.encode({"a2": 1}) + + with pytest.raises(TypeError): + e.encode(1) # type: ignore[arg-type] + + +def test_too_much_indexed(too_much_indexed_event, too_much_indexed_anon_event): + with pytest.raises(ValueError, match="Too much indexed parameters!"): + Event(too_much_indexed_event) + + with pytest.raises(ValueError, match="Too much indexed parameters!"): + Event(too_much_indexed_anon_event) + + +def test_event_anonymous(anonymous_event_no_hash, foo_enc, true_enc): + e = Event(anonymous_event_no_hash) + decoded = e.decode( + foo_enc, + [true_enc], + ) + assert decoded.to_dict() == {"a1": 1, "a2": "foo"} + assert decoded.a1 == 1 + assert decoded.a2 == "foo" + with pytest.raises(AttributeError): + decoded.non_existent + + assert e.decode(foo_enc, [None]).to_dict() == { + "a1": None, + "a2": "foo", + } + with pytest.raises(ValueError, match=r"Invalid topics count"): + e.decode(foo_enc, [e.signature, None]) + + assert e.decode(foo_enc, None).to_dict() == {"a1": None, "a2": "foo"} + + assert e.encode({"a1": 1}) == [true_enc] + assert e.encode([1]) == [true_enc] + + +def test_event_hashed(simple_event_hash): + e = Event(simple_event_hash) + hashed = cry.keccak256([b"hello"])[0] + + assert e.encode({"a1": "hello"}) == [e.signature, hashed] + + assert e.decode(b"\x00", [e.signature, hashed]).to_dict() == {"a1": hashed} + + +def test_simple_int_event(simple_event_int, true_enc): + e = Event(simple_event_int) + assert ( + e.signature.hex() + == "e96585649d926cc4f5031a6113d7494d766198c0ac68b04eb93207460f9d7fd2" + ) + + assert e.encode({"a1": 1}) == [ + e.signature, + true_enc, + ] + + assert e.decode( + bytes.fromhex("00"), + [e.signature, true_enc], + ).to_dict() == {"a1": 1} + + +def test_event_tuple_abiv2(tuple_event, true_enc): + e = Event(tuple_event) + + expected = [ + e.signature, + cry.keccak256( + [ + b"bar1".ljust(32, b"\x00") + + b"bar2".ljust(32, b"\x00") + + b"\x08".rjust(32, b"\x00") + ] + )[0], + cry.keccak256([b"baz"])[0], + ] + given = e.encode([("bar1", "bar2", 8), b"baz"]) + assert [(d.hex() if d is not None else None) for d in given] == [ + d.hex() for d in expected + ] + + assert e.decode(true_enc, expected).to_dict() == { + "a1": expected[1], + "a2": expected[2], + "a3": True, + } + + +def test_event_tuple_dynarr_abiv2(tuple_dynamic_array_event, true_enc): + e = Event(tuple_dynamic_array_event) + expected = [ + e.signature, + cry.keccak256( + [ + b"bar1".ljust(32, b"\x00") + + b"bar2".ljust(32, b"\x00") + + b"\x08".rjust(32, b"\x00"), + b"bar3".ljust(32, b"\x00") + + b"bar4".ljust(32, b"\x00") + + b"\x07".rjust(32, b"\x00"), + b"bar5".ljust(32, b"\x00") + + b"bar6".ljust(32, b"\x00") + + b"\x06".rjust(32, b"\x00"), + ] + )[0], + cry.keccak256([b"baz"])[0], + ] + given = e.encode( + [ + ( + ("bar1", "bar2", 8), + ("bar3", "bar4", 7), + ("bar5", "bar6", 6), + ), + "baz", + ] + ) + + assert [(d.hex() if d is not None else None) for d in given] == [ + d.hex() for d in expected + ] + + assert e.decode(true_enc, expected).to_dict() == { + "a1": expected[1], + "a2": expected[2], + "a3": True, + } + + +def test_event_tuple_fixarr_abiv2(tuple_fixed_array_event): + e = Event(tuple_fixed_array_event) + expected = [ + e.signature, + cry.keccak256( + [ + b"bar1".ljust(32, b"\x00") 
+ + b"bar2".ljust(32, b"\x00") + + b"\x08".rjust(32, b"\x00"), + b"bar3".ljust(32, b"\x00") + + b"bar4".ljust(32, b"\x00") + + b"\x07".rjust(32, b"\x00"), + b"bar5".ljust(32, b"\x00") + + b"bar6".ljust(32, b"\x00") + + b"\x06".rjust(32, b"\x00"), + ] + )[0], + cry.keccak256([b"baz"])[0], + ] + given = e.encode( + [ + ( + ("bar1", "bar2", 8), + ("bar3", "bar4", 7), + ("bar5", "bar6", 6), + ), + "baz", + ] + ) + assert [(d.hex() if d is not None else None) for d in given] == [ + d.hex() for d in expected + ] + + assert e.decode(b"\x00" * 32, expected).to_dict() == { + "a1": expected[1], + "a2": expected[2], + "a3": False, + } + + +def test_event_fixarr_abiv2(fixed_array_event): + e = Event(fixed_array_event) + expected = [ + e.signature, + cry.keccak256( + [ + b"\x07".rjust(32, b"\x00") + + b"\x08".rjust(32, b"\x00") + + b"\x09".rjust(32, b"\x00") + ] + )[0], + ] + given = e.encode([[7, 8, 9]]) + assert [(d.hex() if d is not None else None) for d in given] == [ + d.hex() for d in expected + ] + + assert e.decode(b"\x00", expected).to_dict() == {"a1": expected[1]} + + +def test_event_dynarr_abiv2(dynamic_array_event): + e = Event(dynamic_array_event) + expected = [ + e.signature, + cry.keccak256( + [ + b"\x07".rjust(32, b"\x00") + + b"\x08".rjust(32, b"\x00") + + b"\x09".rjust(32, b"\x00") + ] + )[0], + ] + given = e.encode([[7, 8, 9]]) + assert [(d.hex() if d is not None else None) for d in given] == [ + d.hex() for d in expected + ] + + assert e.decode(b"\x00", expected).to_dict() == { + "a1": expected[1], + } + + +def test_decode_only_unindexed(only_unindexed_event): + e = Event(only_unindexed_event) + + expected_data = { + "a1": 7, + "a2": "foo", + "a3": (True, True, False), + "a4": (False, True), + } + encoded = ( + # a1 + b"\x07".rjust(32, b"\x00") + # * a2 + + b"\xc0".rjust(32, b"\x00") + # a3 + + b"\x01".rjust(32, b"\x00") * 2 + + b"\x00".rjust(32, b"\x00") + # * a4 + + b"\x01\x00".rjust(32, b"\x00") + # len(a2) + + b"\x03".rjust(32, b"\x00") + # a2 + + b"foo".ljust(32, b"\x00") + # len(a4) + + b"\x02".rjust(32, b"\x00") + # a4 + + b"\x00".rjust(32, b"\x00") + + b"\x01".rjust(32, b"\x00") + ) + + # Don't trust myself + assert ( + Coder.encode_list( + ["int", "string", "bool[3]", "bool[]"], list(expected_data.values()) + ).hex() + == encoded.hex() + ) + + assert e.decode(encoded, [e.signature]).to_dict() == expected_data + + +def test_decode_mixed(mixed_indexed_unindexed_event): + e = Event(mixed_indexed_unindexed_event) + + coded_str = cry.keccak256([b"bazz"])[0] + indexed_enc = [b"\x01".rjust(32, b"\x00")] * 2 + [coded_str] + + expected_data = { + # Unindexed + "a1": 7, + "a2": "foo", + "a3": (True, True, False), + "a4": (False, True), + # Indexed, but not hashed + "b1": True, + "b2": True, + # Indexed, hashed + "b3": coded_str, + } + + encoded = ( + # a1 + b"\x07".rjust(32, b"\x00") + # *a2 + + b"\xc0".rjust(32, b"\x00") + # a3 + + b"\x01".rjust(32, b"\x00") * 2 + + b"\x00".rjust(32, b"\x00") + # *a4 + + b"\x01\x00".rjust(32, b"\x00") + # len(a2) + + b"\x03".rjust(32, b"\x00") + # a2 + + b"foo".ljust(32, b"\x00") + # len(a4) + + b"\x02".rjust(32, b"\x00") + # a4 + + b"\x00".rjust(32, b"\x00") + + b"\x01".rjust(32, b"\x00") + ) + + assert ( + Coder.encode_list( + ["int", "string", "bool[3]", "bool[]"], list(expected_data.values())[:4] + ).hex() + == encoded.hex() + ) + + assert e.decode(encoded, [e.signature, *indexed_enc]).to_dict() == expected_data + + +def test_decode_struct_unindexed(unindexed_struct_event): + e = Event(unindexed_struct_event) + + expected_data = {"a1": {"b1": 
True, "b2": "bar"}} + + encoded = ( + b"" + + b"\x20".rjust(32, b"\x00") # Start of meaningful part + + b"\x01".rjust(32, b"\x00") + + b"\x40".rjust(32, b"\x00") + + b"\x03".rjust(32, b"\x00") + + b"bar".ljust(32, b"\x00") + ) + assert ( + Coder.encode_single("(bool,string)", list(expected_data["a1"].values())).hex() + == encoded.hex() + ) + + assert e.decode(encoded, [e.signature]).to_dict() == expected_data + + +def test_decode_struct_fixarray_unindexed(unindexed_struct_fixed_array_event): + e = Event(unindexed_struct_fixed_array_event) + + expected_data = { + "a1": [ + {"b1": True, "b2": "bar1"}, + {"b1": False, "b2": "bar2"}, + {"b1": True, "b2": "bar3"}, + ] + } + + encoded = ( + b"" + # headers + + b"\x20".rjust(32, b"\x00") + + b"\x60".rjust(32, b"\x00") + + b"\xe0".rjust(32, b"\x00") + + b"\x01\x60".rjust(32, b"\x00") + # 1st + + b"\x01".rjust(32, b"\x00") + + b"\x40".rjust(32, b"\x00") + + b"\x04".rjust(32, b"\x00") + + b"bar1".ljust(32, b"\x00") + # 2nd + + b"\x00".rjust(32, b"\x00") + + b"\x40".rjust(32, b"\x00") + + b"\x04".rjust(32, b"\x00") + + b"bar2".ljust(32, b"\x00") + # 3rd + + b"\x01".rjust(32, b"\x00") + + b"\x40".rjust(32, b"\x00") + + b"\x04".rjust(32, b"\x00") + + b"bar3".ljust(32, b"\x00") + ) + assert ( + Coder.encode_single( + "(bool,string)[3]", [list(d.values()) for d in expected_data["a1"]] + ).hex() + == encoded.hex() + ) + + assert e.decode(encoded, [e.signature]).to_dict() == expected_data + + +def test_decode_struct_dynarray_unindexed(unindexed_struct_dynamic_array_event): + e = Event(unindexed_struct_dynamic_array_event) + + expected_data = { + "a1": [ + {"b1": True, "b2": "bar1"}, + {"b1": False, "b2": "bar2"}, + {"b1": True, "b2": "bar3"}, + ] + } + + encoded = ( + b"" + # headers + + b"\x20".rjust(32, b"\x00") + + b"\x03".rjust(32, b"\x00") # length + + b"\x60".rjust(32, b"\x00") + + b"\xe0".rjust(32, b"\x00") + + b"\x01\x60".rjust(32, b"\x00") + # 1st + + b"\x01".rjust(32, b"\x00") + + b"\x40".rjust(32, b"\x00") + + b"\x04".rjust(32, b"\x00") + + b"bar1".ljust(32, b"\x00") + # 2nd + + b"\x00".rjust(32, b"\x00") + + b"\x40".rjust(32, b"\x00") + + b"\x04".rjust(32, b"\x00") + + b"bar2".ljust(32, b"\x00") + # 3rd + + b"\x01".rjust(32, b"\x00") + + b"\x40".rjust(32, b"\x00") + + b"\x04".rjust(32, b"\x00") + + b"bar3".ljust(32, b"\x00") + ) + assert ( + Coder.encode_single( + "(bool,string)[]", [list(d.values()) for d in expected_data["a1"]] + ).hex() + == encoded.hex() + ) + + assert e.decode(encoded, [e.signature]).to_dict() == expected_data + + +def test_decode_struct_nested_unindexed(unindexed_struct_nested_event): + e = Event(unindexed_struct_nested_event) + + expected_data = { + "a1": [ + {"b1": {"c1": True, "c2": False}, "b2": "bar1"}, + {"b1": {"c1": False, "c2": True}, "b2": "bar2"}, + ] + } + + encoded = ( + b"" + # headers + + b"\x20".rjust(32, b"\x00") + + b"\x02".rjust(32, b"\x00") # length + + b"\x40".rjust(32, b"\x00") + + b"\xe0".rjust(32, b"\x00") + # 1st + + b"\x01".rjust(32, b"\x00") + + b"\x00".rjust(32, b"\x00") + + b"\x60".rjust(32, b"\x00") + + b"\x04".rjust(32, b"\x00") + + b"bar1".ljust(32, b"\x00") + # 2nd + + b"\x00".rjust(32, b"\x00") + + b"\x01".rjust(32, b"\x00") + + b"\x60".rjust(32, b"\x00") + + b"\x04".rjust(32, b"\x00") + + b"bar2".ljust(32, b"\x00") + ) + assert ( + Coder.encode_single( + "((bool,bool),string)[]", [((True, False), "bar1"), ((False, True), "bar2")] + ).hex() + == encoded.hex() + ) + + assert e.decode(encoded, [e.signature]).to_dict() == expected_data + + +def test_encode_full(multi_inputs_event): + event = 
Event(multi_inputs_event) + address_from = "0x" + "f" * 40 + address_to = "0x" + "9" * 40 + + topics_enc = event.encode([address_from, address_to]) + data_enc = event.encode_data([256, 127]) + + topics, data = event.encode_full([address_from, 256, address_to, 127]) + assert topics == topics_enc + assert data == data_enc + + topics, data = event.encode_full( + { + "to": address_to, + "value": 256, + "value2": 127, + "from": address_from, + } + ) + assert topics == topics_enc + assert data == data_enc + + with pytest.raises(ValueError, match=".+ shorter .+"): + event.encode_full([address_from, 256, address_to, 127, 0]) + with pytest.raises(ValueError, match=".+ shorter .+"): + event.encode_full([1, address_from, 256, address_to, 127]) + + with pytest.raises(ValueError, match="Invalid keys count"): + event.encode_full( + { + "to": address_to, + "value": 256, + "value2": 127, + "from": address_from, + "x": 1, + } + ) + + with pytest.raises(ValueError, match="Key 'from' is missing"): + event.encode_full( + { + "to": address_to, + "value": 256, + "value2": 127, + } + ) + + with pytest.raises(ValueError, match="Key 'from' is missing"): + event.encode_full( + { + "to": address_to, + "value": 256, + "value2": 127, + "not_address_from": address_from, + } + ) + + with pytest.raises(TypeError): + event.encode_full(1) # type: ignore[arg-type] + + +def test_encode_data(multi_inputs_event): + event = Event(multi_inputs_event) + + enc = event.encode_data([256, 129]) # 256 == 0x100, 129 == 0x81 + assert enc.hex() == "100".rjust(64, "0") + "81".rjust(64, "0") + + enc = event.encode_data({"value": 256, "value2": 129}) + assert enc.hex() == "100".rjust(64, "0") + "81".rjust(64, "0") + + +def test_from_text_ok(anonymous_event_no_hash): + code = R""" + contract A { + event E2(uint indexed a1, string a2) anonymous; + } + """ + e = Event.from_solidity(text=code) + assert e._definition == anonymous_event_no_hash + + +def test_from_file_ok(anonymous_event_no_hash): + code = R""" + contract A { + event E2(uint indexed a1, string a2) anonymous; + } + """ + fd, fpath = mkstemp(text=True) + with os.fdopen(fd, "w") as f: + f.write(code) + + e = Event.from_solidity(file=fpath) + assert e._definition == anonymous_event_no_hash + + +def test_missing_kind(anonymous_event_no_hash): + code = R""" + contract A { + function f() public pure returns (int) {} + } + """ + with pytest.raises(ValueError, match="Missing value"): + Event.from_solidity(text=code) + + +def test_mulitple_kind(anonymous_event_no_hash): + code = R""" + contract A { + event E1(uint indexed a1, string a2) anonymous; + event E2(uint indexed a1, string a2) anonymous; + } + """ + with pytest.raises(ValueError, match="Ambiguous input"): + Event.from_solidity(text=code) + + +def test_invalid_code(anonymous_event_no_hash): + code = R""" + contract A { + event E1(uint indexed a1, string a2) anonymous ?; + } + """ + with pytest.raises(SolcError): + Event.from_solidity(text=code) diff --git a/tests/test_abi_functions.py b/tests/test_abi_functions.py new file mode 100644 index 0000000..858ae42 --- /dev/null +++ b/tests/test_abi_functions.py @@ -0,0 +1,600 @@ +from typing import Any + +import pytest + +from thor_devkit.abi import Constructor, ConstructorT, Function, FunctionT + +# *********************** FIXTURES ************************** + + +@pytest.fixture() +def simple_dynamic(): + data: FunctionT = { + "inputs": [{"name": "a1", "type": "uint256"}, {"name": "a2", "type": "string"}], + "name": "f1", + "outputs": [{"name": "r1", "type": "address"}, {"name": "r2", 
"type": "bytes"}], + "stateMutability": "nonpayable", + "type": "function", + } + return Function(data) + + +@pytest.fixture() +def f_get_str(): + """ + function getStr() public pure returns (string memory) { + return "Hello World!"; + } + """ + data: FunctionT = { + "inputs": [], + "name": "getStr", + "outputs": [{"internalType": "string", "name": "memory", "type": "string"}], + "stateMutability": "pure", + "type": "function", + } + return Function(data) + + +@pytest.fixture() +def f_get_bool(): + """ + function getBool() public pure returns (bool) { + return true; + } + """ + data: FunctionT = { + "inputs": [], + "name": "getBool", + "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], + "stateMutability": "pure", + "type": "function", + } + return Function(data) + + +@pytest.fixture() +def f_get_big_numbers(): + """ + function getBigNumbers() public pure returns (uint256 a, int256 b) { + return (123456, -123456); + } + """ + data: FunctionT = { + "inputs": [], + "name": "getBigNumbers", + "outputs": [ + {"internalType": "uint256", "name": "a", "type": "uint256"}, + {"internalType": "int256", "name": "b", "type": "int256"}, + ], + "stateMutability": "pure", + "type": "function", + } + return Function(data) + + +@pytest.fixture() +def f_out_struct_dynarray(): + data: FunctionT = { + "inputs": [], + "name": "nodes", + "outputs": [ + { + "components": [ + {"internalType": "address", "name": "master", "type": "address"}, + {"internalType": "address", "name": "endorsor", "type": "address"}, + {"internalType": "bytes32", "name": "identity", "type": "bytes32"}, + {"internalType": "bool", "name": "active", "type": "bool"}, + ], + "internalType": "struct AuthorityUtils.Candidate[]", + "name": "list", + "type": "tuple[]", + } + ], + "stateMutability": "nonpayable", + "type": "function", + } + return Function(data) + + +@pytest.fixture() +def f_out_struct_dynarray_data(): + return { + "list": [ + { + "master": "0x6935455ef590eb8746f5230981d09d3552398018", + "endorsor": "0xb5358b034647202d0cd3d1bf615e63e498e02682", + "identity": bytes.fromhex( + "49984a53f9397370079bba8d95f5c15c743098fb318483e0cb6bbf46ec89ccfb" + ), + "active": False, + }, + { + "master": "0x5ff66ee3a3ea2aba2857ea8276edb6190d9a1661", + "endorsor": "0xd51666c6b4fed6070a78691f1f3c8e79ad02e3a0", + "identity": bytes.fromhex( + "76f090d383f49d8faab2eb151241528a552f0ae645f460360a7635b8883987a6" + ), + "active": False, + }, + { + "master": "0xc5a02c1eac7516a9275d86c1cb39a5262b8684a4", + "endorsor": "0xe32499b4143830f2526c79d388ecee530b6357aa", + "identity": bytes.fromhex( + "c635894a50ce5c74c62d238dbe95bd6a0fa076029d913d76b0d0b111c538153f" + ), + "active": False, + }, + { + "master": "0x0e8fd586e022f825a109848832d7e552132bc332", + "endorsor": "0x224626926a7a12225a60e127cec119c939db4a5c", + "identity": bytes.fromhex( + "dbf2712e19af00dc4d376728f7cb06cc215c8e7c53b94cb47cefb4a26ada2a6c" + ), + "active": False, + }, + { + "master": "0xea2e8c9d6dcad9e4be4f1c88a3befb8ea742832e", + "endorsor": "0x1a011475baa1d368fa2d8328a1b7a8d848b62c94", + "identity": bytes.fromhex( + "c68dc811199d40ff7ecd8c8d46454ad9ac5f5cde9bae32f927fec10d82dbdf78" + ), + "active": False, + }, + { + "master": "0x4977d68df97bb313b23238520580d8d3a59939bf", + "endorsor": "0x7ad1d568b3fe5bad3fc264aca70bc7bcd5e4a6ff", + "identity": bytes.fromhex( + "83b137cf7e30864b8a4e56453eb1f094b4434685d86895de38ac2edcf5d3f534" + ), + "active": False, + }, + ] + } + + +@pytest.fixture() +def f_out_struct_dynarray_enc(f_out_struct_dynarray_data): + items = 
f_out_struct_dynarray_data["list"] + return bytes.fromhex( + "".join( + [ + "20".rjust(64, "0"), # address of value + hex(len(items))[2:].rjust(64, "0"), # length + ] + + [ + "".join( + [ + d["master"][2:].rjust(64, "0"), + d["endorsor"][2:].rjust(64, "0"), + d["identity"].hex(), + str(int(d["active"])).rjust(64, "0"), + ] + ) + for d in items + ] + ) + ) + + +@pytest.fixture() +def f_in_struct(): + data: FunctionT = { + "inputs": [ + { + "components": [ + {"internalType": "bool", "name": "flag1", "type": "bool"}, + {"internalType": "bool", "name": "flag2", "type": "bool"}, + {"internalType": "address", "name": "identity", "type": "address"}, + ], + "internalType": "struct ThisClass.SomeStruct", + "name": "args", + "type": "tuple", + } + ], + "name": "doSomething", + "outputs": [], + "stateMutability": "pure", + "type": "function", + } + return Function(data) + + +@pytest.fixture() +def f_in_struct_odd(): + data: FunctionT = { + "inputs": [ + { + "internalType": "struct ThisClass.SomeStruct", + "name": "args", + "type": "(bool,bool,address)", + } + ], + "name": "doSomething", + "outputs": [], + "stateMutability": "pure", + "type": "function", + } + return Function(data) + + +@pytest.fixture() +def f_in_struct_data(): + return { + "args": { + "flag1": True, + "flag2": True, + "identity": "0x4977d68df97bb313b23238520580d8d3a59939bf", + } + } + + +@pytest.fixture() +def f_in_struct_enc(f_in_struct_data): + bool1, bool2, b = f_in_struct_data["args"].values() + return bytes.fromhex( + str(int(bool1)).rjust(64, "0") + + str(int(bool2)).rjust(64, "0") + + b[2:].rjust(64, "0") + ) + + +def _make_f_in_struct_dynarray(array_size=""): + data: FunctionT = { + "inputs": [ + { + "components": [ + {"internalType": "bool", "name": "flag1", "type": "bool"}, + {"internalType": "bool", "name": "flag2", "type": "bool"}, + {"internalType": "address", "name": "identity", "type": "address"}, + ], + "internalType": f"struct ThisClass.SomeStruct[{array_size}]", + "name": "args", + "type": f"tuple[{array_size}]", + } + ], + "name": "doSomething", + "outputs": [], + "stateMutability": "pure", + "type": "function", + } + return Function(data) + + +@pytest.fixture() +def f_in_struct_dynarray(): + return _make_f_in_struct_dynarray("") + + +@pytest.fixture(params=[1, 2, 4, 17]) +def f_in_struct_dynarray_data(f_in_struct_data, request): + return {"args": [f_in_struct_data["args"] for _ in range(request.param)]} + + +@pytest.fixture() +def f_in_struct_dynarray_enc(f_in_struct_dynarray_data, dyn_prefix): + return bytes.fromhex( + dyn_prefix.hex() + + hex(len(f_in_struct_dynarray_data["args"]))[2:].rjust(64, "0") + + "".join( + [ + ( + str(int(d["flag1"])).rjust(64, "0") + + str(int(d["flag2"])).rjust(64, "0") + + d["identity"][2:].rjust(64, "0") + ) + for d in f_in_struct_dynarray_data["args"] + ] + ) + ) + + +@pytest.fixture(params=[1, 2, 4, 17]) +def f_in_struct_fixarray_data(f_in_struct_data, request): + return {"args": [f_in_struct_data["args"] for _ in range(request.param)]} + + +@pytest.fixture() +def f_in_struct_fixarray(f_in_struct_fixarray_data): + return _make_f_in_struct_dynarray(len(f_in_struct_fixarray_data["args"])) + + +@pytest.fixture() +def f_in_struct_fixarray_enc(f_in_struct_fixarray_data): + return bytes.fromhex( + "".join( + [ + ( + str(int(d["flag1"])).rjust(64, "0") + + str(int(d["flag2"])).rjust(64, "0") + + d["identity"][2:].rjust(64, "0") + ) + for d in f_in_struct_fixarray_data["args"] + ] + ) + ) + + +@pytest.fixture(params=[1, 2, 4, 17]) +def f_in_struct_complex_data(f_in_struct_data, request): 
+ return { + "args": [f_in_struct_data["args"] for _ in range(request.param)], + "args2": [f_in_struct_data["args"] for _ in range(request.param + 2)], + "flag0": True, + } + + +@pytest.fixture() +def f_in_struct_complex(f_in_struct_complex_data): + array_size = len(f_in_struct_complex_data["args"]) + data: FunctionT = { + "inputs": [ + { + "components": [ + {"internalType": "bool", "name": "flag1", "type": "bool"}, + {"internalType": "bool", "name": "flag2", "type": "bool"}, + {"internalType": "address", "name": "identity", "type": "address"}, + ], + "internalType": f"struct ThisClass.SomeStruct[{array_size}]", + "name": "args", + "type": "tuple[]", + }, + { + "components": [ + {"internalType": "bool", "name": "flag1", "type": "bool"}, + {"internalType": "bool", "name": "flag2", "type": "bool"}, + {"internalType": "address", "name": "identity", "type": "address"}, + ], + "internalType": f"struct ThisClass.SomeStruct[{array_size + 2}]", + "name": "args2", + "type": "tuple[]", + }, + { + "internalType": "bool", + "name": "flag0", + "type": "bool", + }, + ], + "name": "doSomething", + "outputs": [], + "stateMutability": "pure", + "type": "function", + } + return Function(data) + + +@pytest.fixture() +def f_in_struct_complex_enc(f_in_struct_complex_data, dyn_prefix): + array_size = len(f_in_struct_complex_data["args"]) + return bytes.fromhex( + hex(32 * 3)[2:].rjust(64, "0") + + hex(32 * 4 + array_size * 32 * 3)[2:].rjust(64, "0") + + hex(int(f_in_struct_complex_data["flag0"]))[2:].rjust(64, "0") + + hex(array_size)[2:].rjust(64, "0") + + "".join( + [ + ( + str(int(d["flag1"])).rjust(64, "0") + + str(int(d["flag2"])).rjust(64, "0") + + d["identity"][2:].rjust(64, "0") + ) + for d in f_in_struct_complex_data["args"] + ] + ) + + hex(array_size + 2)[2:].rjust(64, "0") + + "".join( + [ + ( + str(int(d["flag1"])).rjust(64, "0") + + str(int(d["flag2"])).rjust(64, "0") + + d["identity"][2:].rjust(64, "0") + ) + for d in f_in_struct_complex_data["args2"] + ] + ) + ) + + +# *********************************************************** + + +def test_function(simple_dynamic: Function): + selector = "27fcbb2f" + assert simple_dynamic.selector.hex() == selector + assert simple_dynamic.name == "f1" + + encoded = bytes.fromhex( + selector + + "1".rjust(64, "0") # True + + "40".rjust(64, "0") # address of 2nd argument ("foo") + + hex(len(b"foo"))[2:].rjust(64, "0") # len("foo") + + b"foo".hex().ljust(64, "0") # "foo" + ) + assert simple_dynamic.encode([1, "foo"]).hex() == encoded.hex() + assert simple_dynamic.encode({"a1": 1, "a2": "foo"}).hex() == encoded.hex() + assert simple_dynamic.decode_parameters(encoded).to_dict() == {"a1": 1, "a2": "foo"} + + expected: Any = { + "r1": "0xabc0000000000000000000000000000000000001", + "r2": b"foo", + } + encoded_out = bytes.fromhex( + expected["r1"][2:].rjust(64, "0") + + "40".rjust(64, "0") # addr + + hex(len(expected["r2"]))[2:].rjust(64, "0") + + expected["r2"].hex().ljust(64, "0") + ) + assert simple_dynamic.decode(encoded_out).to_dict() == expected + assert simple_dynamic.encode_outputs(expected).hex() == encoded_out.hex() + assert ( + simple_dynamic.encode_outputs(tuple(expected.values())).hex() + == encoded_out.hex() + ) + + with pytest.warns(DeprecationWarning): + simple_dynamic.encode([1, "foo"], to_hex=True) + + +def test_string(f_get_str: Function): + assert f_get_str.selector.hex() == "b8c9e4ed" + assert f_get_str.name == "getStr" + + memory = b"Hello World!" 
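+    # A dynamic string return value is ABI-encoded as a 32-byte offset,
+    # a 32-byte length and the right-padded UTF-8 content, as asserted below.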
+ expected = {"memory": memory.decode()} + encoded_out = bytes.fromhex( + "20".rjust(64, "0") # address + + hex(len(memory))[2:].rjust(64, "0") # length + + memory.hex().ljust(64, "0") # content + ) + assert f_get_str.decode(encoded_out).to_dict() == expected + assert f_get_str.encode_outputs(expected).hex() == encoded_out.hex() + + +def test_bool(f_get_bool: Function): + assert f_get_bool.selector.hex() == "12a7b914" + assert f_get_bool.name == "getBool" + + expected = {"ret_0": True} + encoded_out = bytes.fromhex("1".rjust(64, "0")) + result = f_get_bool.decode(encoded_out) + + assert result.to_dict() == expected + assert result.ret_0 == expected["ret_0"] + assert result == tuple(expected.values()) + + assert f_get_bool.encode_outputs([True]).hex() == encoded_out.hex() + + with pytest.raises(ValueError, match=r".+unnamed.*"): + f_get_bool.encode_outputs(expected) + + +def test_big_number(f_get_big_numbers: Function): + assert f_get_big_numbers.selector.hex() == "ff0d6c7d" + assert f_get_big_numbers.name == "getBigNumbers" + + expected = {"a": 123456, "b": -123456} + encoded_out = bytes.fromhex( + hex(expected["a"])[2:].rjust(64, "0") + + hex(expected["b"] % 2**256)[2:].rjust(64, "0") + ) + + assert f_get_big_numbers.decode(encoded_out).to_dict() == expected + assert f_get_big_numbers.encode_outputs(expected).hex() == encoded_out.hex() + + +def test_abiv2( + f_out_struct_dynarray: Function, + f_out_struct_dynarray_data, + f_out_struct_dynarray_enc: bytes, +): + assert ( + f_out_struct_dynarray.decode(f_out_struct_dynarray_enc).to_dict() + == f_out_struct_dynarray_data + ) + assert ( + f_out_struct_dynarray.encode_outputs(f_out_struct_dynarray_data).hex() + == f_out_struct_dynarray_enc.hex() + ) + + +def test_inputs_struct(f_in_struct: Function, f_in_struct_data, f_in_struct_enc: bytes): + selector = bytes.fromhex("3ca45dbf") + encoded = selector + f_in_struct_enc + assert f_in_struct.selector.hex() == selector.hex() + assert f_in_struct.encode(f_in_struct_data).hex() == encoded.hex() + assert ( + f_in_struct.encode([tuple(f_in_struct_data["args"].values())]).hex() + == encoded.hex() + ) + assert ( + f_in_struct.decode_parameters(selector + f_in_struct_enc).to_dict() + == f_in_struct_data + ) + + +def test_inputs_struct_dynarray( + f_in_struct_dynarray: Function, + f_in_struct_dynarray_data, + f_in_struct_dynarray_enc: bytes, +): + selector = bytes.fromhex("eaf67dba") + assert f_in_struct_dynarray.selector.hex() == selector.hex() + assert ( + f_in_struct_dynarray.encode(f_in_struct_dynarray_data).hex() + == selector.hex() + f_in_struct_dynarray_enc.hex() + ) + assert ( + f_in_struct_dynarray.decode_parameters( + selector + f_in_struct_dynarray_enc + ).to_dict() + == f_in_struct_dynarray_data + ) + + +def test_inputs_struct_fixarray( + f_in_struct_fixarray: Function, + f_in_struct_fixarray_data, + f_in_struct_fixarray_enc: bytes, +): + encoded = f_in_struct_fixarray.selector + f_in_struct_fixarray_enc + assert f_in_struct_fixarray.encode(f_in_struct_fixarray_data).hex() == encoded.hex() + assert ( + f_in_struct_fixarray.decode_parameters(encoded).to_dict() + == f_in_struct_fixarray_data + ) + + +def test_inputs_complex( + f_in_struct_complex: Function, + f_in_struct_complex_data, + f_in_struct_complex_enc: bytes, +): + encoded = f_in_struct_complex.selector + f_in_struct_complex_enc + assert f_in_struct_complex.encode(f_in_struct_complex_data).hex() == encoded.hex() + assert ( + f_in_struct_complex.decode_parameters(encoded).to_dict() + == f_in_struct_complex_data + ) + + +def 
test_odd(f_in_struct_odd: Function, f_in_struct_data, f_in_struct_enc: bytes): + selector = bytes.fromhex("3ca45dbf") + encoded = selector + f_in_struct_enc + assert f_in_struct_odd.selector.hex() == selector.hex() + assert ( + f_in_struct_odd.encode([tuple(f_in_struct_data["args"].values())]).hex() + == encoded.hex() + ) + assert f_in_struct_odd.decode_parameters(selector + f_in_struct_enc).to_dict() == { + "args": tuple(f_in_struct_data["args"].values()) + } + + +# *********************************************************** + + +def test_constructor(): + contract = R""" + contract A { + constructor(uint val, bool flag) {} + } + """ + c = Constructor.from_solidity(text=contract) + + assert not c.selector + assert c.encode((8, True)).hex() == "08".rjust(64, "0") + "1".rjust(64, "0") + + with pytest.raises(AttributeError): + c.decode(b"\x04") + + +def test_constructor_2(): + data: ConstructorT = { + "type": "constructor", + "inputs": [{"type": "address", "name": ""}], + "stateMutability": "pure", + } + c = Constructor(data) + + addr = "0x5aAeb6053F3E94C9b9A09f33669435E7Ef1BeAed" + assert c.encode([addr]).hex() == addr[2:].rjust(64, "0").lower() diff --git a/tests/test_bloom.py b/tests/test_bloom.py index 63c5c50..a0c6a23 100644 --- a/tests/test_bloom.py +++ b/tests/test_bloom.py @@ -12,14 +12,44 @@ def test_estimate(): def test_add(): b = Bloom(14) - b.add(bytes('hello world', 'UTF-8')) - assert b.bits.hex() == '00000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000004000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000001000000000000020000000000000000000000000008000000000000000000000000000000080000000100000000000000000000040020000000000080000000000000000000080000000000000000000000000' + b.add(bytes("hello world", "UTF-8")) + assert b.bits.hex() == ( + "0000000000000000000000000000000000000000000000000004000000000000" + "0000000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000000" + "0000000000000100000000000400000000000000000004000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000000" + "0000000001000000000000000010000000000000200000000000000000000000" + "0000800000000000000000000000000000008000000010000000000000000000" + "0040020000000000080000000000000000000080000000000000000000000000" + ) def test_test(): b = Bloom(14) - for i in range(0, 100): - b.add(bytes(str(i), 'UTF-8')) + for i in range(100): + b.add(str(i).encode()) - for i in range(0, 100): - assert b.test(bytes(str(i), 'UTF-8')) \ No newline at end of file + for i in range(100): + assert b.test(str(i).encode()) + assert str(i).encode() in b + + for i in range(100, 200): + assert not b.test(str(i).encode()) + assert str(i).encode() not in b + + +def test_inherit(): + b = Bloom(14) + for i in range(50): + b.add(str(i).encode()) + + new_b = Bloom(14, b.bits) + for i in range(50, 100): + new_b.add(str(i).encode()) + + for i in range(100): + assert new_b.test(str(i).encode()) + + for i in range(100, 200): + assert not new_b.test(str(i).encode()) diff --git a/tests/test_certificate.py b/tests/test_certificate.py index adc57ad..c8ae7e1 100644 --- a/tests/test_certificate.py +++ b/tests/test_certificate.py @@ -1,91 +1,103 @@ -import copy import pytest -from thor_devkit import certificate -from 
thor_devkit import cry - -PRIV_KEY = bytes.fromhex( - '7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a') -SIGNER = '0x' + \ - cry.public_key_to_address(cry.secp256k1.derive_publicKey(PRIV_KEY)).hex() - -cert_dict = { - 'purpose': 'identification', - 'payload': { - 'type': 'text', - 'content': 'fyi' - }, - 'domain': 'localhost', - 'timestamp': 1545035330, - 'signer': SIGNER -} -cert = certificate.Certificate(**cert_dict) - -cert2_dict = { - 'domain': 'localhost', - 'timestamp': 1545035330, - 'purpose': 'identification', - 'signer': SIGNER, - 'payload': { - 'content': 'fyi', - 'type': 'text' - } -} -cert2 = certificate.Certificate(**cert2_dict) - +from voluptuous.error import Invalid -def test_encode(): - assert certificate.encode(cert) == certificate.encode(cert2) +from thor_devkit.certificate import Certificate, CertificateT +from thor_devkit.cry import blake2b256, public_key_to_address, secp256k1 +from thor_devkit.exceptions import BadSignature - temp = copy.deepcopy(cert_dict) - temp['signer'] = temp['signer'].upper() - temp_cert = certificate.Certificate(**temp) - assert certificate.encode(cert) == certificate.encode(temp_cert) - sig_bytes = cry.secp256k1.sign( - cry.blake2b256([ - certificate.encode(cert).encode('utf-8') - ])[0], - PRIV_KEY +@pytest.fixture() +def private_key(): + return bytes.fromhex( + "7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a" ) - sig = '0x' + sig_bytes.hex() - temp2 = copy.deepcopy(cert_dict) - temp2['signature'] = sig - temp2_cert = certificate.Certificate(**temp2) +@pytest.fixture() +def signer(private_key): + return "0x" + public_key_to_address(secp256k1.derive_public_key(private_key)).hex() - temp3 = copy.deepcopy(cert_dict) - temp3['signature'] = sig.upper() - temp3_cert = certificate.Certificate(**temp3) - assert certificate.encode(temp2_cert) == certificate.encode(temp3_cert) +@pytest.fixture() +def cert_1(signer): + data: CertificateT = { + "purpose": "identification", + "payload": {"type": "text", "content": "fyi"}, + "domain": "localhost", + "timestamp": 1545035330, + "signer": signer, + } + return Certificate(**data) + +@pytest.fixture() +def cert_2(signer): + data: CertificateT = { + "domain": "localhost", + "timestamp": 1545035330, + "purpose": "identification", + "signer": signer, + "payload": {"content": "fyi", "type": "text"}, + } + return Certificate(**data) -def test_verify(): - to_be_signed, _ = cry.blake2b256([ - certificate.encode(cert).encode('utf-8') - ]) - sig_bytes = cry.secp256k1.sign( - to_be_signed, - PRIV_KEY - ) +def test_encode_basic(cert_1, cert_2): + assert cert_1.encode() == cert_1.encode() + assert cert_1.encode() == cert_2.encode() - sig = '0x' + sig_bytes.hex() - # Signature doesn't match. - with pytest.raises(Exception, match='signature does not match with the signer.'): - temp = copy.copy(cert_dict) - temp['signature'] = sig - temp['signer'] = '0x' - certificate.verify(certificate.Certificate(**temp)) +def test_signer_is_case_insensitive(cert_1): + data = cert_1.to_dict() + data["signer"] = data["signer"].lower() + assert cert_1.encode() == Certificate(**data).encode() - # Everything is fine. 
- temp2 = copy.copy(cert_dict) - temp2['signature'] = sig - certificate.verify(certificate.Certificate(**temp2)) + +def test_signature_is_case_insensitive(cert_1, private_key): + sig_bytes = secp256k1.sign(blake2b256([cert_1.encode().encode()])[0], private_key) + sig = "0x" + sig_bytes.hex() + sig_lower_cert = Certificate(**cert_1.to_dict(), signature=sig) + sig_upper_cert = Certificate(**cert_1.to_dict(), signature=sig.upper()) + assert sig_lower_cert.encode() == sig_upper_cert.encode() + + +def test_verify(cert_1, private_key): + to_be_signed, _ = blake2b256([cert_1.encode().encode()]) + + sig_bytes = secp256k1.sign(to_be_signed, private_key) + sig = "0x" + sig_bytes.hex() # Everything is fine. - temp3 = copy.copy(cert_dict) - temp3['signature'] = sig.upper() - certificate.verify(certificate.Certificate(**temp3)) + Certificate(**cert_1.to_dict(), signature=sig).verify() + Certificate(**cert_1.to_dict(), signature=sig.upper()).verify() + + # Invalid signer + temp = cert_1.to_dict() + temp["signer"] = "0x" + with pytest.raises(Invalid): + c = Certificate(**temp, signature=sig) + + # Signature doesn't match. + temp = cert_1.to_dict() + temp["signer"] = "0x" + "0" * 40 + c = Certificate(**temp, signature=sig) + with pytest.raises(BadSignature): + c.verify() + assert not c.is_valid() + + # Signature missing. + temp = cert_1.to_dict() + c = Certificate(**temp) + with pytest.raises(ValueError, match=r"needs.*signature"): + c.verify() + assert not c.is_valid() + + # Signature of wrong length. + temp = cert_1.to_dict() + with pytest.raises(Invalid): + Certificate(**temp, signature=sig[:-2]) + + # Signature not a hex string. + temp = cert_1.to_dict() + with pytest.raises(Invalid): + Certificate(**temp, signature=sig[:-1] + "z") diff --git a/tests/test_cry.py b/tests/test_cry.py index 7956c72..67b4cbf 100644 --- a/tests/test_cry.py +++ b/tests/test_cry.py @@ -1,116 +1,226 @@ +import re + +import pytest +from voluptuous.error import Invalid + from thor_devkit import cry -from thor_devkit.cry import secp256k1 -from thor_devkit.cry import mnemonic -from thor_devkit.cry import keystore +from thor_devkit.cry import HDNode, keystore, mnemonic, secp256k1, utils -def test_utils(): - address = [ - '0x5aAeb6053F3E94C9b9A09f33669435E7Ef1BeAed', - '0xfB6916095ca1df60bB79Ce92cE3Ea74c37c5d359', - '0xdbF03B407c01E7cD3CBea99509d93f8DDDC8C6FB', - '0xD1220A0cf47c7B9Be7A2E6BA89F429762e7b9aDb' +@pytest.fixture( + params=[ + "0x5aAeb6053F3E94C9b9A09f33669435E7Ef1BeAed", + "0xfB6916095ca1df60bB79Ce92cE3Ea74c37c5d359", + "0xdbF03B407c01E7cD3CBea99509d93f8DDDC8C6FB", + "0XD1220A0cf47c7B9Be7A2E6BA89F429762e7b9aDb", ] +) +def address(request): + return request.param + + +@pytest.fixture() +def public_key(): + return bytes.fromhex( + "04b90e9bb2617387eba4502c730de65a33878ef384a46f1096d86f2da19043304" + "afa67d0ad09cf2bea0c6f2d1767a9e62a7a7ecc41facf18f2fa505d92243a658f" + ) + + +@pytest.fixture() +def private_key(): + return bytes.fromhex( + "7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a" + ) + - for addr in address: - assert cry.utils.remove_0x(addr).startswith('0x') == False +@pytest.fixture() +def seed_phrase(): + return ( + "ignore empty bird silly journey junior ripple have guard waste between tenant" + ) - # no 0x at all - assert cry.utils.remove_0x( - 'D1220A0cf47c7B9Be7A2E6BA89F429762e7b9aDb') == 'D1220A0cf47c7B9Be7A2E6BA89F429762e7b9aDb' - # 0x in the middle - assert cry.utils.remove_0x( - 'D1220x0A0cf47c7B9Be7A2E6BA89F429762e7b9aDb') == 'D1220x0A0cf47c7B9Be7A2E6BA89F429762e7b9aDb' +def 
test_remove_0x_1(address: str): + assert utils.remove_0x(address)[:2] not in {"0x", "0X"} + + +def test_remove_0x_2(): + # no 0x at all, same length + p = "D1220A0cf47c7B9Be7A2E6BA89F429762e7b9aDb" + assert utils.remove_0x(p) == p + + # 0x in the middle, other length + p = "12A10x12" + assert utils.remove_0x(p) == p + + +def test_strip_0x04(): + b = b"\x04" + bytes(64) + assert utils.strip_0x04(b) == bytes(64) + assert utils.strip_0x04(b"\xFF" * 65) == b"\xFF" * 65 + assert utils.strip_0x04(b"\xFF" * 64) == b"\xFF" * 64 + assert utils.strip_0x04(b"\xFF") == b"\xFF" + assert utils.strip_0x04(b"\x04") == b"\x04" def test_blake2b(): - h, _ = cry.blake2b256([b'hello world']) - assert h.hex() == '256c83b297114d201b30179f3f0ef0cace9783622da5974326b436178aeef610' + expected = "256c83b297114d201b30179f3f0ef0cace9783622da5974326b436178aeef610" + + h, _ = cry.blake2b256([b"hello world"]) + assert h.hex() == expected - h, _ = cry.blake2b256([b'hello', b' world']) - assert h.hex() == '256c83b297114d201b30179f3f0ef0cace9783622da5974326b436178aeef610' + h, _ = cry.blake2b256([b"hello", b" world"]) + assert h.hex() == expected + + with pytest.raises(TypeError): + cry.blake2b256(b"hello") # type: ignore[arg-type] def test_keccak256(): - h, _ = cry.keccak256([b'hello world']) - assert h.hex() == '47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad' + expected = "47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad" - h, _ = cry.keccak256([b'hello', b' world']) - assert h.hex() == '47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad' + h, _ = cry.keccak256([b"hello world"]) + assert h.hex() == expected + h, _ = cry.keccak256([b"hello", b" world"]) + assert h.hex() == expected + + with pytest.raises(TypeError): + cry.keccak256(b"hello") # type: ignore[arg-type] + + +def test_safe_lowercase(): + assert utils.safe_tolowercase("foo") == "foo" + assert utils.safe_tolowercase("Foo") == "foo" + assert utils.safe_tolowercase("F4") == "f4" + assert utils.safe_tolowercase(1) == 1 + + +def test_address(address: str): + assert cry.is_address(address) + assert cry.to_checksum_address(address) == re.sub(r"^(0X)", r"0x", address) -def test_address(): - address = [ - '0x5aAeb6053F3E94C9b9A09f33669435E7Ef1BeAed', - '0xfB6916095ca1df60bB79Ce92cE3Ea74c37c5d359', - '0xdbF03B407c01E7cD3CBea99509d93f8DDDC8C6FB', - '0xD1220A0cf47c7B9Be7A2E6BA89F429762e7b9aDb' - ] - for addr in address: - assert cry.is_address(addr) - assert cry.to_checksum_address(addr) == addr +def test_bad_address(): + with pytest.raises(ValueError, match=r".+not valid"): + cry.to_checksum_address("0x00") + with pytest.raises(ValueError, match=r".+not valid"): + cry.to_checksum_address(f"0x{'f' * 39}g") -def test_private_key(): - private_key = secp256k1.generate_privateKey() + +def test_private_key_length(private_key: bytes): + private_key = secp256k1.generate_private_key() assert len(private_key) == 32 + secp256k1.validate_private_key(private_key) + assert secp256k1.is_valid_private_key(private_key) + + +def test_private_key_validation(private_key: bytes): + key = b"\x00" * 32 + with pytest.raises(ValueError, match="zero"): + secp256k1.validate_private_key(key) + assert not secp256k1.is_valid_private_key(key) + + key = b"\xFF" * 32 + with pytest.raises(ValueError, match="MAX"): + secp256k1.validate_private_key(key) + assert not secp256k1.is_valid_private_key(key) + + key = b"\x00" * 31 + with pytest.raises(ValueError, match="Length"): + secp256k1.validate_private_key(key) + assert not secp256k1.is_valid_private_key(key) + 
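+    # Values that cannot be converted to bytes are rejected as well.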
+ key = object() + with pytest.raises(ValueError, match="not convertible to bytes"): + secp256k1.validate_private_key(key) # type: ignore[arg-type] + assert not secp256k1.is_valid_private_key(key) # type: ignore[arg-type] -def test_derive_public_key(): - priv = bytes.fromhex( - '7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a') - pub = bytes.fromhex( - '04b90e9bb2617387eba4502c730de65a33878ef384a46f1096d86f2da19043304afa67d0ad09cf2bea0c6f2d1767a9e62a7a7ecc41facf18f2fa505d92243a658f') - _pub = secp256k1.derive_publicKey(priv) - assert pub.hex() == _pub.hex() +def test_upublic_key_validation(private_key: bytes): + key = b"\x04" + b"\x7E" * 64 + utils.validate_uncompressed_public_key(key) + assert utils.is_valid_uncompressed_public_key(key) + key = b"\x01" + b"\x7E" * 64 + with pytest.raises(ValueError, match="04"): + utils.validate_uncompressed_public_key(key) + assert not utils.is_valid_uncompressed_public_key(key) -def test_public_key_to_address(): - pub = bytes.fromhex( - '04b90e9bb2617387eba4502c730de65a33878ef384a46f1096d86f2da19043304afa67d0ad09cf2bea0c6f2d1767a9e62a7a7ecc41facf18f2fa505d92243a658f') - address = cry.public_key_to_address(pub) - assert '0x' + address.hex() == '0xd989829d88b0ed1b06edf5c50174ecfa64f14a64' + key = b"\x04" + b"\x7E" * 63 + with pytest.raises(ValueError, match="65 bytes"): + utils.validate_uncompressed_public_key(key) + assert not utils.is_valid_uncompressed_public_key(key) -def test_sign_hash(): - pub = bytes.fromhex( - '04b90e9bb2617387eba4502c730de65a33878ef384a46f1096d86f2da19043304afa67d0ad09cf2bea0c6f2d1767a9e62a7a7ecc41facf18f2fa505d92243a658f') - priv = bytes.fromhex( - '7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a') - msg_hash, _ = cry.keccak256([b'hello world']) +def test_derive_public_key(public_key: bytes, private_key: bytes): + _pub = secp256k1.derive_public_key(private_key) + assert public_key.hex() == _pub.hex() - sig = cry.secp256k1.sign(msg_hash, priv) - assert sig.hex() == 'f8fe82c74f9e1f5bf443f8a7f8eb968140f554968fdcab0a6ffe904e451c8b9244be44bccb1feb34dd20d9d8943f8c131227e55861736907b02d32c06b934d7200' - _pub = cry.secp256k1.recover(msg_hash, sig) - assert _pub.hex() == pub.hex() +def test_public_key_to_address(public_key: bytes): + address = cry.public_key_to_address(public_key) + assert "0x" + address.hex() == "0xd989829d88b0ed1b06edf5c50174ecfa64f14a64" -def test_mnemonic(): - SENTENCE = 'ignore empty bird silly journey junior ripple have guard waste between tenant' - SEED = '28bc19620b4fbb1f8892b9607f6e406fcd8226a0d6dc167ff677d122a1a64ef936101a644e6b447fd495677f68215d8522c893100d9010668614a68b3c7bb49f' - PRIV = '27196338e7d0b5e7bf1be1c0327c53a244a18ef0b102976980e341500f492425' +def test_sign_hash(public_key: bytes, private_key: bytes): + msg_hash, _ = cry.keccak256([b"hello world"]) + + sig = secp256k1.sign(msg_hash, private_key) + assert sig.hex() == ( + "f8fe82c74f9e1f5bf443f8a7f8eb968140f554968fdcab0a6ffe904e451c8b924" + "4be44bccb1feb34dd20d9d8943f8c131227e55861736907b02d32c06b934d7200" + ) + + _pub = secp256k1.recover(msg_hash, sig) + assert _pub.hex() == public_key.hex() + + with pytest.raises(ValueError, match="of type 'bytes'"): + secp256k1.sign(object(), private_key) # type: ignore[arg-type] + + with pytest.raises(ValueError, match="32 bytes"): + secp256k1.sign(b"\x0A" * 30, private_key) + + with pytest.raises(ValueError, match="Signature"): + secp256k1.recover(msg_hash, private_key[:-1]) + + with pytest.raises(ValueError, match="Signature"): + secp256k1.recover(msg_hash, private_key[:-1] + 
b"\x02") + + +def test_mnemonic(seed_phrase): + SEED = ( + "28bc19620b4fbb1f8892b9607f6e406fcd8226a0d6dc167ff677d122a1a64ef9" + "36101a644e6b447fd495677f68215d8522c893100d9010668614a68b3c7bb49f" + ) + PRIV = "27196338e7d0b5e7bf1be1c0327c53a244a18ef0b102976980e341500f492425" # Random Generate. _words = mnemonic.generate() assert len(_words) == 12 + # Non-standard strength + with pytest.raises(ValueError, match=r"strength should be one of"): + mnemonic.generate(72) # type: ignore[arg-type] + # Valid: True - words = SENTENCE.split(' ') - assert mnemonic.validate(words) == True + words = seed_phrase.split() + assert mnemonic.is_valid(words) # Valid: True - assert mnemonic.validate(mnemonic.generate()) == True + assert mnemonic.is_valid(mnemonic.generate()) # Valid: False - words2 = 'hello word'.split(' ') - assert mnemonic.validate(words2) == False + words2 = "hello word".split() + assert not mnemonic.is_valid(words2) # Valid: False - words3 = sorted(SENTENCE.split(' ')) - assert mnemonic.validate(words3) == False + words3 = sorted(seed_phrase.split()) + assert not mnemonic.is_valid(words3) + with pytest.raises(ValueError, match=r".+ check.+"): + mnemonic.derive_seed(words3) # Seed generated from words. assert mnemonic.derive_seed(words) == bytes.fromhex(SEED) @@ -120,65 +230,121 @@ def test_mnemonic(): def test_keystore(): - ks = { + ks: keystore.KeyStoreT = { "version": 3, "id": "f437ebb1-5b0d-4780-ae9e-8640178ffd77", "address": "dc6fa3ec1f3fde763f4d59230ed303f854968d26", - "crypto": - { + "crypto": { "kdf": "scrypt", "kdfparams": { "dklen": 32, - "salt": "b57682e5468934be81217ad5b14ca74dab2b42c2476864592c9f3b370c09460a", + "salt": ( + "b57682e5468934be81217ad5b14ca74dab2b42c2476864592c9f3b370c09460a" + ), "n": 262144, "r": 8, - "p": 1 + "p": 1, }, "cipher": "aes-128-ctr", - "ciphertext": "88cb876f9c0355a89cad88ee7a17a2179700bc4306eaf78fa67320efbb4c7e31", - "cipherparams": { - "iv": "de5c0c09c882b3f679876b22b6c5af21" - }, - "mac": "8426e8a1e151b28f694849cb31f64cbc9ae3e278d02716cf5b61d7ddd3f6e728" - } + "ciphertext": ( + "88cb876f9c0355a89cad88ee7a17a2179700bc4306eaf78fa67320efbb4c7e31" + ), + "cipherparams": {"iv": "de5c0c09c882b3f679876b22b6c5af21"}, + "mac": "8426e8a1e151b28f694849cb31f64cbc9ae3e278d02716cf5b61d7ddd3f6e728", + }, } - password = b'123456' - private_key_hex = '1599403f7b6c17bb09f16e7f8ebe697af3626db5b41e0f9427a49151c6216920' + password = b"123456" + private_key_hex = "1599403f7b6c17bb09f16e7f8ebe697af3626db5b41e0f9427a49151c6216920" _priv = keystore.decrypt(ks, password) assert _priv.hex() == private_key_hex + norm_ks = keystore.KEYSTORE(ks) + new_ks = keystore.encrypt(bytes.fromhex(private_key_hex), password) + assert new_ks["version"] == norm_ks["version"] + assert new_ks["address"] == norm_ks["address"] + + assert keystore.decrypt(new_ks, password.decode()).hex() == private_key_hex + + keystore.validate(ks) + assert keystore.is_valid(ks) + + ks["address"] = "00" + with pytest.raises(Invalid): + keystore.validate(ks) + assert not keystore.is_valid(ks) -def test_hdnode(): - sentence = 'ignore empty bird silly journey junior ripple have guard waste between tenant' - words = sentence.split(' ') + +def test_hdnode(seed_phrase): + words = seed_phrase.split(" ") addresses = [ - '339fb3c438606519e2c75bbf531fb43a0f449a70', - '5677099d06bc72f9da1113afa5e022feec424c8e', - '86231b5cdcbfe751b9ddcd4bd981fc0a48afe921', - 'd6f184944335f26ea59dbb603e38e2d434220fcd', - '2ac1a0aecd5c80fb5524348130ab7cf92670470a' + "339fb3c438606519e2c75bbf531fb43a0f449a70", + 
"5677099d06bc72f9da1113afa5e022feec424c8e", + "86231b5cdcbfe751b9ddcd4bd981fc0a48afe921", + "d6f184944335f26ea59dbb603e38e2d434220fcd", + "2ac1a0aecd5c80fb5524348130ab7cf92670470a", ] - hd_node = cry.HDNode.from_mnemonic(words) + hd_node = HDNode.from_mnemonic(words) for idx, address in enumerate(addresses): child_node = hd_node.derive(idx) - assert child_node.address().hex() == address - - priv = hd_node.private_key() - pub = hd_node.public_key() - cc = hd_node.chain_code() - - n = cry.HDNode.from_private_key(priv, cc) + assert child_node.address.hex() == address + + priv = hd_node.private_key + pub = hd_node.public_key + cc = hd_node.chain_code + assert ( + priv.hex() == "e4a2687ec443f4d23b6ba9e7d904a31acdda90032b34aa0e642e6dd3fd36f682" + ) + assert pub.hex() == ( + "04dc40b4324626eb393dbf77b6930e915dcca6297b42508adb743674a8ad5c69a0" + "46010f801a62cb945a6cb137a050cefaba0572429fc4afc57df825bfca2f219a" + ) + assert ( + cc.hex() == "105da5578eb3228655a8abe70bf4c317e525c7f7bb333634f5b7d1f70e111a33" + ) + + hd_node.finger_print + + n = HDNode.from_private_key(priv, cc) for idx, address in enumerate(addresses): child_node = n.derive(idx) - assert child_node.address().hex() == address + assert child_node.address.hex() == address - n2 = cry.HDNode.from_public_key(pub, cc) + n2 = HDNode.from_public_key(pub, cc) for idx, address in enumerate(addresses): - child_node = n.derive(idx) - assert child_node.address().hex() == address + child_node = n2.derive(idx) + assert child_node.address.hex() == address + + HDNode.from_seed(mnemonic.derive_seed(words)) + + +def test_strict_zip(): + from thor_devkit.cry.utils import _strict_zip + + assert list(_strict_zip()) == [] + assert list(_strict_zip([])) == [] + assert list(_strict_zip([1, 2])) == [(1,), (2,)] + assert list(_strict_zip([1, 2], ["a", "b"])) == [(1, "a"), (2, "b")] + assert list(_strict_zip([1, 2], ["a", "b"], (3, 4))) == [(1, "a", 3), (2, "b", 4)] + + def _gen(): + yield from range(5) + + assert list(_strict_zip(_gen(), range(5))) == [ + (0, 0), + (1, 1), + (2, 2), + (3, 3), + (4, 4), + ] + + with pytest.raises(ValueError, match="argument 2 is shorter"): + list(_strict_zip([1, 2, 3], [1, 2])) + + with pytest.raises(ValueError, match="argument 2 is longer"): + list(_strict_zip([1, 2, 3], [1, 2, 3, 4])) diff --git a/tests/test_rlp.py b/tests/test_rlp.py index fd13449..6be3a57 100644 --- a/tests/test_rlp.py +++ b/tests/test_rlp.py @@ -1,51 +1,64 @@ import pytest -from rlp.exceptions import DeserializationError, SerializationError + from thor_devkit import rlp as m_rlp +from thor_devkit.exceptions import DeserializationError, SerializationError -def test_bytesKind(): +def test_bytes_kind(): kind = m_rlp.BytesKind() - assert kind.serialize(bytes.fromhex('ff')) == b'\xff' - assert kind.serialize(bytes.fromhex('01ff')) == b'\x01\xff' + assert kind.serialize(bytes.fromhex("ff")) == b"\xff" + assert kind.serialize(bytes.fromhex("01ff")) == b"\x01\xff" + + assert kind.deserialize(bytes.fromhex("ff")) == b"\xff" + assert kind.deserialize(bytes.fromhex("01ff")) == b"\x01\xff" - assert kind.deserialize(bytes.fromhex('ff')) == b'\xff' - assert kind.deserialize(bytes.fromhex('01ff')) == b'\x01\xff' + with pytest.raises(TypeError): + kind.serialize(1) # type: ignore[arg-type] - with pytest.raises(SerializationError): - kind.serialize(1) + with pytest.raises(TypeError): + kind.serialize("0x1234") # type: ignore[arg-type] - with pytest.raises(SerializationError): - kind.serialize('0x1234') + with pytest.raises(TypeError): + kind.deserialize("01ff") # type: 
ignore[arg-type] -def test_numericKind_encode(): +def test_numeric_kind_encode(): # Set up a max 8 bytes width NumericKind. kind = m_rlp.NumericKind(8) # Should pass - assert kind.serialize('0x0').hex() == '' - assert kind.serialize('0x123').hex() == '0123' - assert kind.serialize('0').hex() == '' - assert kind.serialize('100').hex() == '64' - assert kind.serialize(0).hex() == '' - assert kind.serialize(0x123).hex() == '0123' + assert kind.serialize("0x0").hex() == "" + assert kind.serialize("0x123").hex() == "0123" + assert kind.serialize("0").hex() == "" + assert kind.serialize("100").hex() == "64" + assert kind.serialize(0).hex() == "" + assert kind.serialize(0x123).hex() == "0123" # Should Throw - with pytest.raises(SerializationError): - kind.serialize('0x123z') + with pytest.raises( + SerializationError, match="The input string does not represent a number" + ): + kind.serialize("0x123z") - with pytest.raises(SerializationError): - kind.serialize({}) + with pytest.raises(TypeError, match=r"expected str or int, got.+"): + kind.serialize({}) # type: ignore[arg-type] - with pytest.raises(SerializationError): - kind.serialize('0x') + with pytest.raises( + SerializationError, match="The input string does not represent a number" + ): + kind.serialize("0x") - with pytest.raises(SerializationError): + with pytest.raises(SerializationError, match="Cannot serialize negative integers"): kind.serialize(-1) - with pytest.raises(SerializationError): - kind.serialize('0x12345678123456780') + with pytest.raises( + SerializationError, match=r"Integer too large \(does not fit in 8 bytes\)" + ): + kind.serialize("0x12345678123456780") + + with pytest.raises(TypeError, match=r"expected str or int, got.+"): + kind.serialize(None) # type: ignore[arg-type] # We won't hit this exception because big int are safe in Python. # Max Integer problem in Javascript: 2^53 -1 @@ -57,236 +70,308 @@ def test_numericKind_encode(): # kind.serialize(2 ** 64) -def test_numericKind_decode(): +def test_numeric_kind_decode(): # Set up a max 8 bytes width NumericKind. kind = m_rlp.NumericKind(8) # Should pass. assert kind.deserialize(bytes(0)) == 0 - assert kind.deserialize(bytes([1, 2, 3])) == int('0x010203', 16) - assert kind.deserialize(bytes([1, 2, 3, 4, 5, 6, 7, 8])) == int( - '0x102030405060708', 16) + assert kind.deserialize(bytes([1, 2, 3])) == int("0x010203", 16) + assert kind.deserialize(bytes(range(1, 9))) == int("0x102030405060708", 16) # Should fail. 
- with pytest.raises(DeserializationError): + with pytest.raises(DeserializationError, match=r"wrong size"): kind.deserialize(bytes([1] * 9)) - with pytest.raises(DeserializationError): + with pytest.raises(DeserializationError, match=r"leading zeroes"): kind.deserialize(bytes([0, 1, 2])) -def test_blobKind_encode(): +def test_blob_kind_encode(): kind = m_rlp.BlobKind() - assert kind.serialize('0x1234567890').hex() == '1234567890' + assert kind.serialize("0x1234567890").hex() == "1234567890" - with pytest.raises(SerializationError, match=".+even.+"): - kind.serialize('0x1') + with pytest.raises(SerializationError, match=r"even"): + kind.serialize("0x1") - with pytest.raises(SerializationError): - kind.serialize('0xxy') + with pytest.raises(SerializationError, match=r"Expected.+string"): + kind.serialize("0xxy") - with pytest.raises(Exception): - kind.serialize(1) + with pytest.raises(SerializationError, match=r"Expected.+string"): + kind.serialize(1) # type: ignore[arg-type] -def test_blobKind_decode(): +def test_blob_kind_decode(): kind = m_rlp.BlobKind() - assert kind.deserialize(bytes([1, 2, 3, 4, 5])) == '0x0102030405' + assert kind.deserialize(bytes([1, 2, 3, 4, 5])) == "0x0102030405" + with pytest.raises(TypeError, match=r"expected bytes"): + kind.deserialize("12") # type: ignore[arg-type] -def test_fixedBlob_encode(): + +def test_fixed_blob_encode(): kind = m_rlp.FixedBlobKind(4) - assert kind.serialize('0x12345678').hex() == '12345678' + assert kind.serialize("0x12345678").hex() == "12345678" - with pytest.raises(SerializationError): - kind.serialize('0x1234567z') + with pytest.raises(SerializationError, match=r"Expected.+string"): + kind.serialize("0x1234567z") - with pytest.raises(SerializationError): - kind.serialize('0x1234567890') + with pytest.raises(SerializationError, match=r"Expected.+8, got 10"): + kind.serialize("0x1234567890") - with pytest.raises(SerializationError): - kind.serialize('0x1234567') + with pytest.raises(SerializationError, match=r"even"): + kind.serialize("0x1234567") - with pytest.raises(Exception): - kind.serialize(1) + with pytest.raises(SerializationError, match=r"Expected.+string"): + kind.serialize(1) # type: ignore[arg-type] - with pytest.raises(Exception): - kind.serialize(None) + with pytest.raises(SerializationError, match=r"Expected.+string"): + kind.serialize(None) # type: ignore[arg-type] -def test_fixedBlob_decode(): +def test_fixed_blob_decode(): kind = m_rlp.FixedBlobKind(4) - assert kind.deserialize(bytes([1, 2, 3, 4])) == '0x01020304' + assert kind.deserialize(bytes([1, 2, 3, 4])) == "0x01020304" - with pytest.raises(DeserializationError): + with pytest.raises(DeserializationError, match=r"Bytes should be of length 4"): kind.deserialize(bytes([0, 0])) - with pytest.raises(DeserializationError): + with pytest.raises(DeserializationError, match=r"Bytes should be of length 4"): kind.deserialize(bytes(0)) -def test_noneableFixedBlobKind_encode(): - kind = m_rlp.NoneableFixedBlobKind(4) +def test_optional_fixed_blob_kind_encode(): + kind = m_rlp.OptionalFixedBlobKind(4) - assert kind.serialize(None).hex() == '' - assert kind.serialize('0x12345678').hex() == '12345678' + assert kind.serialize(None).hex() == "" + assert kind.serialize("0x12345678").hex() == "12345678" - with pytest.raises(SerializationError): - kind.serialize('0x1234567z') + with pytest.raises(SerializationError, match=r"Expected.+string"): + kind.serialize("0x1234567z") - with pytest.raises(SerializationError): - kind.serialize('0x11') + with pytest.raises(SerializationError, 
match=r"of length 8"): + kind.serialize("0x11") - with pytest.raises(SerializationError): - kind.serialize('0x1234567890') + with pytest.raises(SerializationError, match=r"of length 8"): + kind.serialize("0x1234567890") - with pytest.raises(SerializationError): - kind.serialize('0x1234567') + with pytest.raises(SerializationError, match=r"even"): + kind.serialize("0x1234567") - with pytest.raises(Exception): - kind.serialize(1) + with pytest.raises(SerializationError, match="int"): + kind.serialize(1) # type: ignore[arg-type] - with pytest.raises(SerializationError): - kind.serialize('0x') + with pytest.raises(SerializationError, match=r"of length 8"): + kind.serialize("0x") -def test_noneableFixedBlobKind_decode(): - kind = m_rlp.NoneableFixedBlobKind(4) +def test_optional_fixed_blob_kind_decode(): + kind = m_rlp.OptionalFixedBlobKind(4) assert kind.deserialize(bytes(0)) is None - assert kind.deserialize(bytes([1, 2, 3, 4])) == '0x01020304' + assert kind.deserialize(bytes([1, 2, 3, 4])) == "0x01020304" - with pytest.raises(DeserializationError): - kind.deserialize(bytes([0, 0])) + with pytest.raises(DeserializationError, match=r"Bytes should be of length 4"): + kind.deserialize(bytes(2)) -def test_compact_fixed_blobkind_encode(): +def test_compact_fixed_blob_kind_encode(): kind = m_rlp.CompactFixedBlobKind(4) # zero leading - assert kind.serialize('0x00112233').hex() == '112233' + assert kind.serialize("0x00112233").hex() == "112233" # zero in the middle - assert kind.serialize('0x11002233').hex() == '11002233' + assert kind.serialize("0x11002233").hex() == "11002233" + -def test_compact_fixed_blobkind_decode(): +def test_compact_fixed_blob_kind_decode(): kind = m_rlp.CompactFixedBlobKind(4) - # should prefix the zeros - assert kind.deserialize(bytes([1])) == '0x00000001' - # should prefix the zeros, and the middle zeros should not interfer. - assert kind.deserialize(bytes.fromhex('110022')) == '0x00110022' + # Should prefix the zeros + assert kind.deserialize(bytes([1])) == "0x00000001" + # Should prefix the zeros, and the middle zeros should not interfere. 
+ assert kind.deserialize(bytes.fromhex("110022")) == "0x00110022" + with pytest.raises(DeserializationError, match=r"too long"): + kind.deserialize(b"1122334455") -def test_compact_fixed_blobkind_encode_with_zero(): + with pytest.raises(DeserializationError, match=r"no leading zeroes"): + kind.deserialize(bytes(1)) + + +def test_compact_fixed_blob_kind_encode_with_zero(): kind = m_rlp.CompactFixedBlobKind(4) - assert kind.serialize('0x00000000').hex() == '' + assert kind.serialize("0x00000000") == b"" + assert kind.deserialize(b"") == "0x00000000" -def test_rlp_complex(): - my_data = { +@pytest.fixture() +def complex_data(): + return { "foo": 123, - "bar": '0x12345678', - "baz": [ - { "x": '0x11', "y": 1234 }, - { "x": '0x12', "y": 5678 } - ] + "bar": "0x12345678", + "baz": [{"x": "0x11", "y": 1234}, {"x": "0x12", "y": 5678}], } - my_wrapper = m_rlp.DictWrapper([ - ("foo", m_rlp.NumericKind()), - ("bar", m_rlp.FixedBlobKind(4)), - ("baz", m_rlp.ListWrapper( - list_of_codecs=[ - m_rlp.DictWrapper([ - ("x", m_rlp.BlobKind()), - ("y", m_rlp.NumericKind()) - ]), - m_rlp.DictWrapper([ - ("x", m_rlp.BlobKind()), - ("y", m_rlp.NumericKind()) - ]) - ] - ) - ) - ]) - cc = m_rlp.ComplexCodec(my_wrapper) +@pytest.fixture() +def complex_encoded(): + return "d17b8412345678cac4118204d2c41282162e" - assert cc.encode(my_data).hex() == 'd17b8412345678cac4118204d2c41282162e' - assert cc.decode(bytes.fromhex('d17b8412345678cac4118204d2c41282162e')) == my_data +@pytest.fixture() +def complex_codec(): + return m_rlp.ComplexCodec( + m_rlp.DictWrapper( + [ + ("foo", m_rlp.NumericKind()), + ("bar", m_rlp.FixedBlobKind(4)), + ( + "baz", + m_rlp.ListWrapper( + [ + m_rlp.DictWrapper( + [("x", m_rlp.BlobKind()), ("y", m_rlp.NumericKind())] + ), + m_rlp.DictWrapper( + [("x", m_rlp.BlobKind()), ("y", m_rlp.NumericKind())] + ), + ] + ), + ), + ] + ) + ) + + +@pytest.fixture() +def complex_codec_homo(): + return m_rlp.ComplexCodec( + m_rlp.DictWrapper( + { + "foo": m_rlp.NumericKind(), + "bar": m_rlp.FixedBlobKind(4), + "baz": m_rlp.HomoListWrapper( + m_rlp.DictWrapper({"x": m_rlp.BlobKind(), "y": m_rlp.NumericKind()}) + ), + } + ) + ) -def test_rlp_complex_homo(): - my_data = { +@pytest.fixture() +def complex_data_nested(): + return { "foo": 123, - "bar": '0x12345678', + "bar": "0x12345678", "baz": [ - { "x": '0x11', "y": 1234 }, - { "x": '0x12', "y": 5678 } - ] + {"x": "0x11", "y": 1234}, + {"x": "0x12", "y": 5678}, + 789, + [123, {"a": 1}], + ], } - my_wrapper = m_rlp.DictWrapper([ - ("foo", m_rlp.NumericKind()), - ("bar", m_rlp.FixedBlobKind(4)), - ("baz", m_rlp.HomoListWrapper( - codec=m_rlp.DictWrapper([ - ("x", m_rlp.BlobKind()), - ("y", m_rlp.NumericKind()) - ]) - ) + +@pytest.fixture() +def complex_codec_nested(): + return m_rlp.ComplexCodec( + m_rlp.DictWrapper( + { + "foo": m_rlp.NumericKind(), + "bar": m_rlp.FixedBlobKind(4), + "baz": m_rlp.ListWrapper( + [ + m_rlp.DictWrapper( + {"x": m_rlp.BlobKind(), "y": m_rlp.NumericKind()} + ), + m_rlp.DictWrapper( + {"x": m_rlp.BlobKind(), "y": m_rlp.NumericKind()} + ), + m_rlp.NumericKind(), + m_rlp.ListWrapper( + [ + m_rlp.NumericKind(), + m_rlp.DictWrapper({"a": m_rlp.NumericKind()}), + ] + ), + ] + ), + } ) - ]) + ) - cc = m_rlp.ComplexCodec(my_wrapper) - assert cc.encode(my_data).hex() == 'd17b8412345678cac4118204d2c41282162e' +def test_rlp_complex(complex_data, complex_codec, complex_encoded): + assert complex_codec.encode(complex_data).hex() == complex_encoded + assert complex_codec.decode(bytes.fromhex(complex_encoded)) == complex_data - assert 
cc.decode(bytes.fromhex('d17b8412345678cac4118204d2c41282162e')) == my_data +def test_rlp_complex_malformed_1(complex_data, complex_codec): + complex_data.pop("foo") + with pytest.raises(SerializationError, match=r"Missing key: 'foo'"): + complex_codec.encode(complex_data) -def test_rlp_complex_strange(): - my_data = { - "foo": 123, - "bar": '0x12345678', - "baz": [ - { "x": '0x11', "y": 1234 }, - { "x": '0x12', "y": 5678 }, - 789, - [ - 123, - { - "a": 1 - } - ] - ] - } - my_wrapper = m_rlp.DictWrapper([ - ("foo", m_rlp.NumericKind()), - ("bar", m_rlp.FixedBlobKind(4)), - ("baz", m_rlp.ListWrapper([ - m_rlp.DictWrapper([ - ("x", m_rlp.BlobKind()), - ("y", m_rlp.NumericKind()) - ]), - m_rlp.DictWrapper([ - ("x", m_rlp.BlobKind()), - ("y", m_rlp.NumericKind()) - ]), - m_rlp.NumericKind(), - m_rlp.ListWrapper([ - m_rlp.NumericKind(), - m_rlp.DictWrapper([ - ("a", m_rlp.NumericKind()) - ]) - ]) - ])) - ]) - - cc = m_rlp.ComplexCodec(my_wrapper) - - my_bytes = cc.encode(my_data) # encode - assert cc.decode(my_bytes) == my_data # decode \ No newline at end of file +def test_rlp_complex_malformed_2(complex_data, complex_codec): + complex_data.pop("foo") + complex_data["sam"] = 19 + with pytest.raises(SerializationError, match=r"Missing key: 'foo'"): + complex_codec.encode(complex_data) + + +def test_rlp_complex_malformed_3(complex_data, complex_codec): + complex_data.pop("foo") + complex_data["sam"] = 18 + complex_data["say"] = 19 + with pytest.raises(SerializationError, match=r"Missing key: 'foo'"): + complex_codec.encode(complex_data) + + +def test_rlp_complex_malformed_4(complex_data, complex_codec): + complex_data["sam"] = 19 + with pytest.raises(SerializationError, match=r"Keys count differs:"): + complex_codec.encode(complex_data) + + +def test_rlp_complex_malformed_5(complex_data, complex_codec): + complex_data["baz"].append(2) + with pytest.raises(SerializationError, match=r"Items count differs:"): + complex_codec.encode(complex_data) + + +def test_rlp_complex_malformed_6(complex_data, complex_codec): + complex_data["baz"].pop(-1) + with pytest.raises(SerializationError, match=r"Items count differs:"): + complex_codec.encode(complex_data) + + +def test_rlp_complex_malformed_7(complex_codec): + # Add extra item to array + enc = "d67b8412345678cfc4118204d2c41282162ec413820457" + with pytest.raises(DeserializationError, match=r"Items count differs:"): + complex_codec.decode(bytes.fromhex(enc)) + + +def test_rlp_complex_homo(complex_data, complex_codec_homo, complex_encoded): + assert complex_codec_homo.encode(complex_data).hex() == complex_encoded + assert complex_codec_homo.decode(bytes.fromhex(complex_encoded)) == complex_data + + +def test_rlp_complex_strange(complex_data_nested, complex_codec_nested): + my_bytes = complex_codec_nested.encode(complex_data_nested) + assert complex_codec_nested.decode(my_bytes) == complex_data_nested + + +def test_wrong_coder_pack(): + from thor_devkit.rlp import DictWrapper, pack, unpack + + with pytest.raises(TypeError), pytest.warns(DeprecationWarning): + pack(1, int) # type: ignore[arg-type] + + with pytest.raises(TypeError), pytest.warns(DeprecationWarning): + unpack(b"", int) # type: ignore[arg-type] + + with pytest.raises(TypeError), pytest.warns(DeprecationWarning): + unpack(b"", DictWrapper) # type: ignore[arg-type] diff --git a/tests/test_transaction.py b/tests/test_transaction.py index 6cba40d..ceeeca8 100644 --- a/tests/test_transaction.py +++ b/tests/test_transaction.py @@ -1,250 +1,414 @@ -import copy import pytest -from thor_devkit import 
cry, transaction - -body = { - "chainTag": 1, - "blockRef": '0x00000000aabbccdd', - "expiration": 32, - "clauses": [ - { - "to": '0x7567d83b7b8d80addcb281a71d54fc7b3364ffed', - "value": 10000, - "data": '0x000000606060' - }, - { - "to": '0x7567d83b7b8d80addcb281a71d54fc7b3364ffed', - "value": 20000, - "data": '0x000000606060' - } - ], - "gasPriceCoef": 128, - "gas": 21000, - "dependsOn": None, - "nonce": 12345678 -} - -unsigned = transaction.Transaction(body) -unsigned_encoded = bytes.fromhex('f8540184aabbccdd20f840df947567d83b7b8d80addcb281a71d54fc7b3364ffed82271086000000606060df947567d83b7b8d80addcb281a71d54fc7b3364ffed824e208600000060606081808252088083bc614ec0') - -signed = transaction.Transaction(body) -signed_encoded = bytes.fromhex('f8970184aabbccdd20f840df947567d83b7b8d80addcb281a71d54fc7b3364ffed82271086000000606060df947567d83b7b8d80addcb281a71d54fc7b3364ffed824e208600000060606081808252088083bc614ec0b841f76f3c91a834165872aa9464fc55b03a13f46ea8d3b858e528fcceaf371ad6884193c3f313ff8effbb57fe4d1adc13dceb933bedbf9dbb528d2936203d5511df00') -priv_key = bytes.fromhex('7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a') -_a, _ = cry.blake2b256([signed.encode()]) -_b = cry.secp256k1.sign(_a, priv_key) -signed.set_signature(_b) -signer = cry.public_key_to_address(cry.secp256k1.derive_publicKey(priv_key)) - -def test_unsigned(): +from voluptuous.error import Invalid + +import thor_devkit.transaction # noqa: F401 # Used for mocking +from thor_devkit import cry +from thor_devkit.exceptions import ( + BadTransaction, + DeserializationError, + SerializationError, +) +from thor_devkit.transaction import Transaction, TransactionBodyT + + +@pytest.fixture() +def non_delegated_body() -> TransactionBodyT: + return { + "chainTag": 1, + "blockRef": "0x00000000aabbccdd", + "expiration": 32, + "clauses": [ + { + "to": "0x7567d83b7b8d80addcb281a71d54fc7b3364ffed", + "value": 10000, + "data": "0x000000606060", + }, + { + "to": "0x7567d83b7b8d80addcb281a71d54fc7b3364ffed", + "value": 20000, + "data": "0x000000606060", + }, + ], + "gasPriceCoef": 128, + "gas": 21000, + "dependsOn": None, + "nonce": 12345678, + } + + +@pytest.fixture() +def unsigned_non_delegated_tx(non_delegated_body): + return Transaction(non_delegated_body) + + +@pytest.fixture() +def unsigned_non_delegated_encoded(non_delegated_body): + return bytes.fromhex( + "f8540184aabbccdd20f840df947567d83b7b8d80addcb281a71d54fc7b3364ff" + "ed82271086000000606060df947567d83b7b8d80addcb281a71d54fc7b3364ff" + "ed824e208600000060606081808252088083bc614ec0" + ) + + +@pytest.fixture() +def signed_non_delegated_encoded(non_delegated_body): + return bytes.fromhex( + "f8970184aabbccdd20f840df947567d83b7b8d80addcb281a71d54fc7b3364ff" + "ed82271086000000606060df947567d83b7b8d80addcb281a71d54fc7b3364ff" + "ed824e208600000060606081808252088083bc614ec0b841f76f3c91a8341658" + "72aa9464fc55b03a13f46ea8d3b858e528fcceaf371ad6884193c3f313ff8eff" + "bb57fe4d1adc13dceb933bedbf9dbb528d2936203d5511df00" + ) + + +@pytest.fixture() +def private_key(): + return bytes.fromhex( + "7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a" + ) + + +@pytest.fixture() +def signed_non_delegated_tx(non_delegated_body, private_key): + signed = Transaction(non_delegated_body) + _a, _ = cry.blake2b256([signed.encode()]) + _b = cry.secp256k1.sign(_a, private_key) + signed.signature = _b + return signed + + +@pytest.fixture() +def signer(private_key): + return cry.public_key_to_address(cry.secp256k1.derive_public_key(private_key)) + + +@pytest.fixture() +def 
delegated_body() -> TransactionBodyT: + return { + "chainTag": 1, + "blockRef": "0x00000000aabbccdd", + "expiration": 32, + "clauses": [ + { + "to": "0x7567d83b7b8d80addcb281a71d54fc7b3364ffed", + "value": 10000, + "data": "0x000000606060", + }, + { + "to": "0x7567d83b7b8d80addcb281a71d54fc7b3364ffed", + "value": 20000, + "data": "0x000000606060", + }, + ], + "gasPriceCoef": 128, + "gas": 21000, + "dependsOn": None, + "nonce": 12345678, + "reserved": {"features": 1, "unused": [b"1234"]}, + } + + +@pytest.fixture() +def unsigned_delegated_tx(delegated_body): + return Transaction(delegated_body) + + +def test_unsigned(unsigned_non_delegated_tx, unsigned_non_delegated_encoded): + unsigned = unsigned_non_delegated_tx + x = unsigned.encode() signing_hash, _ = cry.blake2b256([x]) - assert signing_hash.hex() == '2a1c25ce0d66f45276a5f308b99bf410e2fc7d5b6ea37a49f2ab9f1da9446478' + assert ( + signing_hash.hex() + == "2a1c25ce0d66f45276a5f308b99bf410e2fc7d5b6ea37a49f2ab9f1da9446478" + ) - assert unsigned.get_signing_hash().hex() == '2a1c25ce0d66f45276a5f308b99bf410e2fc7d5b6ea37a49f2ab9f1da9446478' + assert ( + unsigned.get_signing_hash().hex() + == "2a1c25ce0d66f45276a5f308b99bf410e2fc7d5b6ea37a49f2ab9f1da9446478" + ) - assert unsigned.get_id() is None + assert not unsigned.is_delegated - assert unsigned.get_intrinsic_gas() == 37432 + assert unsigned.intrinsic_gas == 37432 - assert unsigned.get_signature() == None + assert unsigned.id is None + assert unsigned.signature is None + assert unsigned.origin is None - assert unsigned.get_origin() == None + assert unsigned.encode().hex() == unsigned_non_delegated_encoded.hex() - assert unsigned.encode().hex() == unsigned_encoded.hex() + assert Transaction.decode(unsigned_non_delegated_encoded, True) == unsigned - assert transaction.Transaction.decode(unsigned_encoded, True) == unsigned - body_1 = copy.deepcopy(body) - body_1['clauses'] = [] +def test_unsigned_gas_1(non_delegated_body): + non_delegated_body["clauses"] = [] - assert transaction.Transaction(body_1).get_intrinsic_gas() == 21000 + assert Transaction(non_delegated_body).intrinsic_gas == 21000 - body_2 = copy.deepcopy(body) - body_2['clauses'] = [ - { - "to": None, - "value": 0, - "data": '0x' - } - ] - assert transaction.Transaction(body_2).get_intrinsic_gas() == 53000 - - -def test_empty_data(): - body_1 = copy.deepcopy(body) - body_1['clauses'][0]['data'] = '0x' - transaction.Transaction(body_1).encode() - - -def test_invalid_body(): - with pytest.raises(Exception): - body_1 = copy.deepcopy(body) - body_1["chainTag"] = 256 - transaction.Transaction(body_1).encode() - - with pytest.raises(Exception): - body_1 = copy.deepcopy(body) - body_1["chainTag"] = -1 - transaction.Transaction(body_1).encode() - - with pytest.raises(Exception): - body_1 = copy.deepcopy(body) - body_1["chainTag"] = 1.1 - transaction.Transaction(body_1).encode() - - with pytest.raises(Exception): - body_1 = copy.deepcopy(body) - body_1['blockRef'] = '0x' - transaction.Transaction(body_1).encode() - - with pytest.raises(Exception): - body_1 = copy.deepcopy(body) - body_1['blockRef'] = '0x' + '0' * 18 - transaction.Transaction(body_1).encode() - - with pytest.raises(Exception): - body_1 = copy.deepcopy(body) - body_1['expiration'] = 2 ** 32 - transaction.Transaction(body_1).encode() - - with pytest.raises(Exception): - body_1 = copy.deepcopy(body) - body_1['expiration'] = -1 - transaction.Transaction(body_1).encode() - - with pytest.raises(Exception): - body_1 = copy.deepcopy(body) - body_1['expiration'] = 1.1 - 
transaction.Transaction(body_1).encode() - - with pytest.raises(Exception): - body_1 = copy.deepcopy(body) - body_1['gasPriceCoef'] = 256 - transaction.Transaction(body_1).encode() - - with pytest.raises(Exception): - body_1 = copy.deepcopy(body) - body_1['gasPriceCoef'] = -1 - transaction.Transaction(body_1).encode() - - with pytest.raises(Exception): - body_1 = copy.deepcopy(body) - body_1['gasPriceCoef'] = 1.1 - transaction.Transaction(body_1).encode() - - with pytest.raises(Exception): - body_1 = copy.deepcopy(body) - body_1['gas'] = '0x10000000000000000' - transaction.Transaction(body_1).encode() - - with pytest.raises(Exception): - body_1 = copy.deepcopy(body) - body_1['nonce'] = '0x10000000000000000' - transaction.Transaction(body_1).encode() - -def test_signed(): - assert signed.get_signature().hex() == 'f76f3c91a834165872aa9464fc55b03a13f46ea8d3b858e528fcceaf371ad6884193c3f313ff8effbb57fe4d1adc13dceb933bedbf9dbb528d2936203d5511df00' - assert signed.get_origin() == '0x' + signer.hex() - assert signed.get_id() == '0xda90eaea52980bc4bb8d40cb2ff84d78433b3b4a6e7d50b75736c5e3e77b71ec' - assert signed.get_signing_hash('0x' + signer.hex()).hex() == 'da90eaea52980bc4bb8d40cb2ff84d78433b3b4a6e7d50b75736c5e3e77b71ec' - -def test_encode_decode(): - assert signed.encode().hex() == signed_encoded.hex() - assert transaction.Transaction.decode(signed_encoded, False) == signed - - with pytest.raises(Exception): - transaction.Transaction.decode(unsigned_encoded, False) - - # TODO - # with pytest.raises(Exception): - # transaction.Transaction.decode(signed_encoded, True) - -def test_incorrectly_signed(): - tx = transaction.Transaction(body) - tx.set_signature(bytes([1,2,3])) - assert tx.get_origin() == None - assert tx.get_id() == None - -delegated_body = { - "chainTag": 1, - "blockRef": '0x00000000aabbccdd', - "expiration": 32, - "clauses": [ - { - "to": '0x7567d83b7b8d80addcb281a71d54fc7b3364ffed', - "value": 10000, - "data": '0x000000606060' - }, - { - "to": '0x7567d83b7b8d80addcb281a71d54fc7b3364ffed', - "value": 20000, - "data": '0x000000606060' - } - ], - "gasPriceCoef": 128, - "gas": 21000, - "dependsOn": None, - "nonce": 12345678, - "reserved": { - "features": 1, - "unused": [b'1234'] - } -} +def test_unsigned_gas_2(non_delegated_body): + non_delegated_body["clauses"] = [{"to": None, "value": 0, "data": "0x"}] + + assert Transaction(non_delegated_body).intrinsic_gas == 53000 + + +def test_empty_data(non_delegated_body): + non_delegated_body["clauses"][0]["data"] = "0x" + Transaction(non_delegated_body).encode() + + +def test_invalid_body_1(non_delegated_body: TransactionBodyT): + non_delegated_body["chainTag"] = 256 + with pytest.raises(SerializationError, match=r".+too large.+"): + Transaction(non_delegated_body).encode() + + +def test_invalid_body_2(non_delegated_body: TransactionBodyT): + non_delegated_body["chainTag"] = -1 + with pytest.raises(SerializationError, match=r".+negative.+"): + Transaction(non_delegated_body).encode() + + +def test_invalid_body_3(non_delegated_body: TransactionBodyT): + non_delegated_body["chainTag"] = 1.1 # type: ignore + with pytest.raises(Invalid): + Transaction(non_delegated_body).encode() + + +def test_invalid_body_4(non_delegated_body: TransactionBodyT): + non_delegated_body["blockRef"] = "0x" + with pytest.raises(Invalid): + Transaction(non_delegated_body) + + +def test_invalid_body_5(non_delegated_body: TransactionBodyT): + non_delegated_body["blockRef"] = "0x" + "0" * 18 + with pytest.raises(Invalid): + Transaction(non_delegated_body) + -delegated_tx 
= transaction.Transaction(copy.deepcopy(delegated_body)) +def test_invalid_body_6(non_delegated_body: TransactionBodyT): + non_delegated_body["expiration"] = 2**32 + with pytest.raises(SerializationError, match=r".+too large.+"): + Transaction(non_delegated_body).encode() -def test_features(): - assert unsigned.is_delegated() == False - assert delegated_tx.is_delegated() == True + +def test_invalid_body_7(non_delegated_body: TransactionBodyT): + non_delegated_body["expiration"] = -1 + with pytest.raises(SerializationError, match=r".+negative.+"): + Transaction(non_delegated_body).encode() + + +def test_invalid_body_8(non_delegated_body: TransactionBodyT): + non_delegated_body["expiration"] = 1.1 # type: ignore + with pytest.raises(Invalid): + Transaction(non_delegated_body).encode() + + +def test_invalid_body_9(non_delegated_body: TransactionBodyT): + non_delegated_body["gasPriceCoef"] = 256 + with pytest.raises(SerializationError, match=r".+too large.+"): + Transaction(non_delegated_body).encode() + + +def test_invalid_body_10(non_delegated_body: TransactionBodyT): + non_delegated_body["gasPriceCoef"] = -1 + with pytest.raises(SerializationError, match=r".+negative.+"): + Transaction(non_delegated_body).encode() + + +def test_invalid_body_11(non_delegated_body: TransactionBodyT): + non_delegated_body["gasPriceCoef"] = 1.1 # type: ignore + with pytest.raises(Invalid): + Transaction(non_delegated_body).encode() + + +def test_invalid_body_12(non_delegated_body: TransactionBodyT): + non_delegated_body["gas"] = "0x10000000000000000" + with pytest.raises(SerializationError, match=r".+too large.+"): + Transaction(non_delegated_body).encode() + + +def test_invalid_body_13(non_delegated_body: TransactionBodyT): + non_delegated_body["nonce"] = "0x10000000000000000" + with pytest.raises(SerializationError, match=r".+too large.+"): + Transaction(non_delegated_body).encode() + + +def test_reserved_with_untrimmed_bytes(non_delegated_body): + non_delegated_body["reserved"] = {"features": 0, "unused": [b""]} + untrimmed_enc = bytes.fromhex( + "f8560184aabbccdd20f840df947567d83b7b8d80addcb281a71d54fc7b3364ffed" + "82271086000000606060df947567d83b7b8d80addcb281a71d54fc7b3364ffed82" + "4e208600000060606081808252088083bc614ec28080" + ) + trimmed_enc = bytes.fromhex( + "f8540184aabbccdd20f840df947567d83b7b8d80addcb281a71d54fc7b3364ffed" + "82271086000000606060df947567d83b7b8d80addcb281a71d54fc7b3364ffed82" + "4e208600000060606081808252088083bc614ec0" + ) + assert Transaction(non_delegated_body).encode().hex() == trimmed_enc.hex() + with pytest.raises(BadTransaction): + Transaction.decode(untrimmed_enc, unsigned=True) + + +def test_reserved(non_delegated_body): + non_delegated_body["reserved"] = {"features": 1} + enc = Transaction(non_delegated_body).encode() + assert non_delegated_body == Transaction.decode(enc, unsigned=True).body + + +def test_reserved_features_0(non_delegated_body): + non_delegated_body["reserved"] = {"features": 0} + enc = Transaction(non_delegated_body).encode() + expected_body = {**non_delegated_body} + expected_body.pop("reserved") + assert expected_body == Transaction.decode(enc, unsigned=True).body + + +def test_signed(signed_non_delegated_tx, signer): + signed = signed_non_delegated_tx + + assert signed.signature + assert signed.signature.hex() == ( + "f76f3c91a834165872aa9464fc55b03a13f46ea8d3b858e528fcceaf371ad6884" + "193c3f313ff8effbb57fe4d1adc13dceb933bedbf9dbb528d2936203d5511df00" + ) + assert signed.origin == "0x" + signer.hex() + assert ( + signed.id + == 
"0xda90eaea52980bc4bb8d40cb2ff84d78433b3b4a6e7d50b75736c5e3e77b71ec" + ) + assert ( + signed.get_signing_hash("0x" + signer.hex()).hex() + == "da90eaea52980bc4bb8d40cb2ff84d78433b3b4a6e7d50b75736c5e3e77b71ec" + ) + + +def test_encode_decode( + signed_non_delegated_tx, + signed_non_delegated_encoded, + unsigned_non_delegated_encoded, +): + assert signed_non_delegated_tx.encode().hex() == signed_non_delegated_encoded.hex() + assert ( + Transaction.decode(signed_non_delegated_encoded, False) + == signed_non_delegated_tx + ) + + with pytest.raises(DeserializationError): + Transaction.decode(unsigned_non_delegated_encoded, False) + + with pytest.raises(DeserializationError): + Transaction.decode(signed_non_delegated_encoded, True) + + +def test_incorrectly_signed_non_delegated(non_delegated_body): + tx = Transaction(non_delegated_body) + tx.signature = bytes([1, 2, 3]) + assert tx.origin is None + assert tx.id is None + + tx.signature = bytes(range(65)) + assert tx.origin is None + assert tx.id is None + assert tx.delegator is None + + +def test_incorrectly_signed_delegated(delegated_body, mocker): + tx = Transaction(delegated_body) + tx.signature = bytes([1, 2, 3]) + assert tx.origin is None + assert tx.id is None + assert tx.delegator is None + + mocker.patch( + "thor_devkit.transaction.Transaction.origin", + new_callable=mocker.PropertyMock, + return_value="0x" + bytes(range(64)).hex(), + ) + tx = Transaction(delegated_body) + tx.signature = bytes(range(65 * 2)) + assert tx.origin is not None + assert tx.delegator is None + + mocker.patch( + "thor_devkit.transaction.Transaction.origin", + new_callable=mocker.PropertyMock, + return_value=None, + ) + tx = Transaction(delegated_body) + tx.signature = bytes(range(65 * 2)) + assert tx.is_delegated + assert tx._signature_is_valid() + assert tx.origin is None + assert tx.delegator is None + + +def test_features(unsigned_delegated_tx): + assert unsigned_delegated_tx.is_delegated + assert unsigned_delegated_tx != {} # Sender # priv_1 = cry.secp256k1.generate_privateKey() - priv_1 = bytes.fromhex('58e444d4fe08b0f4d9d86ec42f26cf15072af3ddc29a78e33b0ceaaa292bcf6b') - addr_1 = cry.public_key_to_address( cry.secp256k1.derive_publicKey(priv_1) ) + priv_1 = bytes.fromhex( + "58e444d4fe08b0f4d9d86ec42f26cf15072af3ddc29a78e33b0ceaaa292bcf6b" + ) + addr_1 = cry.public_key_to_address(cry.secp256k1.derive_public_key(priv_1)) # Gas payer # priv_2 = cry.secp256k1.generate_privateKey() - priv_2 = bytes.fromhex('0bfd6a863f347f4ef2cf2d09c3db7b343d84bb3e6fc8c201afee62de6381dc65') - addr_2 = cry.public_key_to_address( cry.secp256k1.derive_publicKey(priv_2) ) + priv_2 = bytes.fromhex( + "0bfd6a863f347f4ef2cf2d09c3db7b343d84bb3e6fc8c201afee62de6381dc65" + ) + addr_2 = cry.public_key_to_address(cry.secp256k1.derive_public_key(priv_2)) - h = delegated_tx.get_signing_hash() - dh = delegated_tx.get_signing_hash('0x' + addr_1.hex()) + h = unsigned_delegated_tx.get_signing_hash() + dh = unsigned_delegated_tx.get_signing_hash("0x" + addr_1.hex()) # Concat two parts to forge a signature. sig = cry.secp256k1.sign(h, priv_1) + cry.secp256k1.sign(dh, priv_2) - delegated_tx.set_signature(sig) + unsigned_delegated_tx.signature = sig + + assert unsigned_delegated_tx.origin == "0x" + addr_1.hex() + assert unsigned_delegated_tx.delegator == "0x" + addr_2.hex() - assert delegated_tx.get_origin() == '0x' + addr_1.hex() - assert delegated_tx.get_delegator() == '0x' + addr_2.hex() # Well this is a dangerous part, we tests the "private" function. 
 # Shouldn't recommend you to do the same, but I need to test it.
-def test_unused():
-    delegated_body_2 = copy.deepcopy(delegated_body)
-    delegated_body_2["reserved"]["unused"] = [bytes.fromhex("0F0F"), bytes.fromhex("0101")]
-    delegated_tx_2 = transaction.Transaction(delegated_body_2)
-    assert delegated_tx_2.is_delegated() == True
-    assert transaction.Transaction.decode(delegated_tx_2.encode(), True) == delegated_tx_2
-
-    reserved_list = delegated_tx_2._encode_reserved()
-    assert reserved_list == [bytes.fromhex("01"), bytes.fromhex("0F0F"), bytes.fromhex("0101")]
-
-    delegated_body_3 = copy.deepcopy(delegated_body)
-    delegated_body_3["reserved"]["unused"] = [bytes.fromhex("0F0F"), bytes(0)]
-    delegated_tx_3 = transaction.Transaction(delegated_body_3)
-    assert delegated_tx_3.is_delegated() == True
-
-    reserved_list = delegated_tx_3._encode_reserved()
+def test_unused_1(delegated_body):
+    delegated_body["reserved"]["unused"] = [
+        bytes.fromhex("0F0F"),
+        bytes.fromhex("0101"),
+    ]
+    delegated_tx = Transaction(delegated_body)
+    assert delegated_tx.is_delegated
+    assert Transaction.decode(delegated_tx.encode(), True) == delegated_tx
+
+    reserved_list = delegated_tx._encode_reserved()
+    assert reserved_list == [
+        bytes.fromhex("01"),
+        bytes.fromhex("0F0F"),
+        bytes.fromhex("0101"),
+    ]
+
+
+def test_unused_2(delegated_body):
+    delegated_body["reserved"]["unused"] = [bytes.fromhex("0F0F"), bytes(0)]
+    delegated_tx = Transaction(delegated_body)
+    assert delegated_tx.is_delegated
+
+    reserved_list = delegated_tx._encode_reserved()
     assert reserved_list == [bytes.fromhex("01"), bytes.fromhex("0F0F")]
-    assert transaction.Transaction.decode(delegated_tx_3.encode(), True) == delegated_tx_3
+    assert Transaction.decode(delegated_tx.encode(), True) == delegated_tx
+
-def test_body_copy():
-    b1 = copy.deepcopy(body)
-    tx = transaction.Transaction(b1)
-    b2 = tx.get_body(False)
-    b3 = tx.get_body(True)
+def test_body_copy(non_delegated_body):
+    tx = Transaction(non_delegated_body)
+    b1 = tx.body
+    b2 = tx.copy_body()
-    assert id(b2) != id(b3) # id should be different
-    assert b2 == b3 # content should be the same
\ No newline at end of file
+    assert b1 is not b2  # id should be different
+    assert b1 == b2  # content should be the same
diff --git a/tests/test_validation.py b/tests/test_validation.py
new file mode 100644
index 0000000..7102dea
--- /dev/null
+++ b/tests/test_validation.py
@@ -0,0 +1,247 @@
+import pytest
+from voluptuous.error import Invalid
+
+from thor_devkit.validation import address_type, hex_integer, hex_string
+
+
+@pytest.fixture(
+    params=[
+        "0x00",
+        "0x01",
+        "0x1",
+        "0xFFFFFFE",
+        "0xFFFffE",
+        "0xeeeeee",
+        "0x" + "F" * 128,
+    ]
+)
+def integer_prefixed(request):
+    return request.param
+
+
+@pytest.fixture()
+def integer_unprefixed(integer_prefixed):
+    return integer_prefixed[2:]
+
+
+def test_hex_integer_no_length_no_prefix(integer_unprefixed):
+    assert (
+        hex_integer(require_prefix=False)(integer_unprefixed)
+        == "0x" + integer_unprefixed.lower()
+    )
+    assert hex_integer(require_prefix=False, to_int=True)(integer_unprefixed) == int(
+        integer_unprefixed, 16
+    )
+
+
+def test_hex_integer_no_length_prefix_allowed(integer_prefixed):
+    assert (
+        hex_integer(require_prefix=False)(integer_prefixed) == integer_prefixed.lower()
+    )
+    assert hex_integer(require_prefix=False, to_int=True)(integer_prefixed) == int(
+        integer_prefixed, 16
+    )
+
+    assert hex_integer()(integer_prefixed) == integer_prefixed.lower()
+    assert hex_integer(to_int=True)(integer_prefixed) == int(integer_prefixed, 16)
+
+
+def test_hex_integer_no_length_prefix_missing(integer_unprefixed):
+    with pytest.raises(Invalid, match=r"must start with .0x."):
+        hex_integer()(integer_unprefixed)
+
+
+def test_hex_integer_with_length_prefix_ok(integer_prefixed):
+    assert (
+        hex_integer(len(integer_prefixed) - 2)(integer_prefixed)
+        == integer_prefixed.lower()
+    )
+
+
+def test_hex_integer_with_length_no_prefix_ok(integer_unprefixed):
+    assert (
+        hex_integer(len(integer_unprefixed), require_prefix=False)(integer_unprefixed)
+        == "0x" + integer_unprefixed.lower()
+    )
+
+
+def test_hex_integer_with_length_prefix_longer(integer_prefixed):
+    with pytest.raises(Invalid, match=r"Expected.*length"):
+        hex_integer(len(integer_prefixed) - 2)(integer_prefixed + "f")
+
+
+def test_hex_integer_with_length_no_prefix_longer(integer_unprefixed):
+    with pytest.raises(Invalid, match=r"Expected.*length"):
+        hex_integer(len(integer_unprefixed), require_prefix=False)(
+            integer_unprefixed + "f"
+        )
+
+
+def test_hex_integer_with_length_prefix_shorter(integer_prefixed):
+    with pytest.raises(Invalid, match=r"Expected.*length"):
+        hex_integer(len(integer_prefixed) - 2)(integer_prefixed[:-1])
+
+
+def test_hex_integer_with_length_no_prefix_shorter(integer_unprefixed):
+    with pytest.raises(Invalid, match=r"Expected.*length"):
+        hex_integer(len(integer_unprefixed), require_prefix=False)(
+            integer_unprefixed[:-1]
+        )
+
+
+def test_hex_integer_empty():
+    assert hex_integer(allow_empty=True)("0x") == "0x"
+    assert hex_integer(length=0, allow_empty=True)("0x") == "0x"
+
+    assert hex_integer(require_prefix=False, allow_empty=True)("") == "0x"
+    assert hex_integer(require_prefix=False, length=0, allow_empty=True)("") == "0x"
+
+    assert hex_integer(require_prefix=False, allow_empty=True)("0x") == "0x"
+    assert hex_integer(require_prefix=False, length=0, allow_empty=True)("0x") == "0x"
+
+    with pytest.warns(RuntimeWarning):
+        assert hex_integer(length=0)("0x") == "0x"
+
+
+def test_hex_integer_odd():
+    with pytest.raises(Invalid, match="Expected string"):
+        hex_integer()(0)  # type: ignore[arg-type]
+
+    with pytest.raises(Invalid, match="Expected string"):
+        hex_integer()(object())  # type: ignore[arg-type]
+
+    with pytest.raises(Invalid, match="convertible to number"):
+        hex_integer()("0xzz")
+
+
+# -----------------------------------------------------------------------------
+
+
+@pytest.fixture(
+    params=[
+        "0x00",
+        "0x01",
+        "0xFFFFFF",
+        "0xFFFffE",
+        "0xeeeeee",
+        "0x" + "F" * 128,
+    ]
+)
+def string_prefixed(request):
+    return request.param
+
+
+@pytest.fixture()
+def string_unprefixed(string_prefixed):
+    return string_prefixed[2:]
+
+
+def test_hex_string_no_length_no_prefix(string_unprefixed):
+    assert hex_string()(string_unprefixed) == string_unprefixed.lower()
+    assert hex_string(to_bytes=True)(string_unprefixed) == bytes.fromhex(
+        string_unprefixed
+    )
+
+    assert hex_string(allow_prefix=True)(string_unprefixed) == string_unprefixed.lower()
+    assert hex_string(allow_prefix=True, to_bytes=True)(
+        string_unprefixed
+    ) == bytes.fromhex(string_unprefixed)
+
+
+def test_hex_string_no_length_prefix_allowed(string_prefixed):
+    assert hex_string(allow_prefix=True)(string_prefixed) == string_prefixed.lower()[2:]
+    assert hex_string(allow_prefix=True, to_bytes=True)(
+        string_prefixed
+    ) == bytes.fromhex(string_prefixed[2:])
+
+
+def test_hex_string_no_length_prefix_denied(string_prefixed):
+    with pytest.raises(Invalid, match=r"without .0x. prefix"):
+        hex_string()(string_prefixed)
+
+
+def test_hex_string_with_length_prefix_ok(string_prefixed):
+    assert (
+        hex_string(len(string_prefixed) - 2, allow_prefix=True)(string_prefixed)
+        == string_prefixed.lower()[2:]
+    )
+
+
+def test_hex_string_with_length_no_prefix_ok(string_unprefixed):
+    assert (
+        hex_string(len(string_unprefixed))(string_unprefixed)
+        == string_unprefixed.lower()
+    )
+
+
+def test_hex_string_with_length_prefix_longer(string_prefixed):
+    with pytest.raises(Invalid, match=r"Expected.*length"):
+        hex_string(len(string_prefixed) - 2, allow_prefix=True)(string_prefixed + "f")
+
+
+def test_hex_string_with_length_no_prefix_longer(string_unprefixed):
+    with pytest.raises(Invalid, match=r"Expected.*length"):
+        hex_string(len(string_unprefixed))(string_unprefixed + "f")
+
+
+def test_hex_string_with_length_prefix_shorter(string_prefixed):
+    with pytest.raises(Invalid, match=r"Expected.*length"):
+        hex_string(len(string_prefixed) - 2, allow_prefix=True)(string_prefixed[:-1])
+
+
+def test_hex_string_with_length_no_prefix_shorter(string_unprefixed):
+    with pytest.raises(Invalid, match=r"Expected.*length"):
+        hex_string(len(string_unprefixed))(string_unprefixed[:-1])
+
+
+def test_hex_string_empty():
+    assert hex_string(allow_prefix=True, allow_empty=True)("0x") == ""
+    assert hex_string(allow_prefix=True, length=0, allow_empty=True)("0x") == ""
+
+    assert hex_string(allow_prefix=True, allow_empty=True)("") == ""
+    assert hex_string(allow_prefix=True, length=0, allow_empty=True)("") == ""
+
+    assert hex_string(allow_empty=True)("") == ""
+    assert hex_string(length=0, allow_empty=True)("") == ""
+
+    with pytest.warns(RuntimeWarning):
+        assert hex_string(length=0)("") == ""
+
+
+def test_hex_string_odd():
+    with pytest.raises(Invalid, match="Expected string"):
+        hex_string()(0)  # type: ignore[arg-type]
+
+    with pytest.raises(Invalid, match="Expected string"):
+        hex_string()(object())  # type: ignore[arg-type]
+
+    with pytest.raises(Invalid, match="convertible to bytes"):
+        hex_string()("zz")
+
+
+# -----------------------------------------------------------------------------
+
+
+@pytest.mark.parametrize(
+    "addr",
+    [
+        "0" * 40,
+        "f" * 40,
+        "F" * 40,
+        "4fa" + "F" * 37,
+    ],
+)
+def test_address_valid(addr):
+    assert address_type()(addr) == "0x" + addr.lower()
+    assert address_type()("0x" + addr) == "0x" + addr.lower()
+
+
+def test_address_invalid():
+    with pytest.raises(Invalid):
+        address_type()(None)  # type: ignore[arg-type]
+    with pytest.raises(Invalid):
+        address_type()("0x")
+    with pytest.raises(Invalid):
+        address_type()("")
+    with pytest.raises(Invalid):
+        address_type()("0x" + "f" * 39)
diff --git a/thor_devkit/__init__.py b/thor_devkit/__init__.py
index 856ba54..5f4ea8c 100644
--- a/thor_devkit/__init__.py
+++ b/thor_devkit/__init__.py
@@ -1 +1,10 @@
-from .bloom import Bloom
\ No newline at end of file
+"""Python VeChain SDK.
+
+Python 3 (Python 3.6+) library to assist smooth development on VeChain
+for developers and hobbyists.
+"""
+from .bloom import Bloom
+
+VERSION = (2, 0, 0)
+
+__all__ = ["Bloom"]
diff --git a/thor_devkit/abi.py b/thor_devkit/abi.py
index 917c3b2..5de47a3 100644
--- a/thor_devkit/abi.py
+++ b/thor_devkit/abi.py
@@ -1,360 +1,1856 @@
-'''
-ABI Module.
+r"""ABI encoding module.""" + +import os +import re +import sys +import warnings +from abc import ABC, abstractmethod +from collections import namedtuple +from keyword import iskeyword +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Generic, + Iterable, + Iterator, + List, + Mapping, + NamedTuple, + NoReturn, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + Union, + cast, + overload, +) -ABI structure the "Functions" and "Events". +import eth_abi +import eth_utils +import solcx +import voluptuous +from voluptuous import Schema + +from thor_devkit.cry import keccak256 +from thor_devkit.cry.utils import _with_doc_mro, izip +from thor_devkit.deprecation import deprecated_to_property + +if sys.version_info < (3, 8): + from typing_extensions import Final, Literal, TypedDict +else: + from typing import Final, Literal, TypedDict +if sys.version_info < (3, 10): + from typing_extensions import TypeAlias +else: + from typing import TypeAlias +if sys.version_info < (3, 11): + from typing_extensions import NotRequired +else: + from typing import NotRequired + +__all__ = [ + # Main + "Function", + "Constructor", + "Event", + "Coder", + # Types + "_ParameterT", + "StateMutabilityT", + "FuncParameterT", + "FunctionT", + "ConstructorT", + "EventParameterT", + "EventT", + # Schemas + "MUTABILITY", + "FUNC_PARAMETER", + "FUNCTION", + "CONSTRUCTOR", + "EVENT_PARAMETER", + "EVENT", + # Other + "calc_event_topic", + "calc_function_selector", + "FunctionResult", + "Encodable", + "FunctionBase", +] + +MUTABILITY: Final = Schema(voluptuous.Any("pure", "view", "payable", "nonpayable")) +""" +Validation :external:class:`~voluptuous.schema_builder.Schema` +for ``stateMutability`` parameter. + +Must be a string, one of: "pure", "view", "payable", "nonpayable". + +:meta hide-value: + +.. versionchanged:: 2.0.0 + Removed unsupported "constant" option. +""" + + +StateMutabilityT: TypeAlias = Literal["pure", "view", "payable", "nonpayable"] +"""Literal type of ``stateMutability`` parameter. + +Must be a string, one of: "pure", "view", "payable", "nonpayable". + +.. versionadded:: 2.0.0 +""" + + +class _ParameterT(TypedDict): + """Base for parameter of function or event.""" + + name: str + """Parameter name.""" + type: str # noqa: A003 + """Parameter type.""" + + +FUNC_PARAMETER: Final = Schema( + { + "name": str, + "type": str, + voluptuous.Optional("internalType"): str, + # if the "type" field is "tuple" or "type[]" + voluptuous.Optional("components"): [voluptuous.Self], + }, + required=True, +) +""" +Validation :external:class:`~voluptuous.schema_builder.Schema` for function parameter. -ABI also encode/decode params for functions. +:meta hide-value: +""" -See: -https://github.com/ethereum/wiki/wiki/Ethereum-Contract-ABI -"Function Selector": -sha3("funcName(uint256,address)") -> cut out first 4 bytes. +@_with_doc_mro(_ParameterT) +class FuncParameterT(_ParameterT): + """Type of ABI function parameter. -"Argument Encoding": + .. versionadded:: 2.0.0 + """ -Basic: -uint M=8,16,...256 -int M=8,16,...256 -address -bool -fixedx fixed256x18 -bytes bytes32 -function 20bytes address + 4 bytes signature. 
+ internalType: NotRequired[str] # noqa: N815 + """InternalType is used for struct name aliases, may be ignored.""" + # Recursive types aren't really supported, but do partially work + # This will be expanded a few times and then replaced with Any (deeply nested) + components: NotRequired[Sequence["FuncParameterT"]] # type: ignore[misc] + """Sequence of components, each must be :class:`FuncParameterT`.""" -Fixed length: -[M] Fix sized array. int[10], uint256[33], -Dynamic length: -bytes -string -[] -''' +FUNCTION: Final = Schema( + { + "type": "function", + "name": str, + "stateMutability": MUTABILITY, + "inputs": [FUNC_PARAMETER], + "outputs": [FUNC_PARAMETER], + }, + required=True, + extra=voluptuous.REMOVE_EXTRA, +) +"""Validation :external:class:`~voluptuous.schema_builder.Schema` for ABI function. -# voluptuous is a better library in validating dict. -from voluptuous import Schema, Any, Optional -from typing import List -from typing import Union -import eth_utils -import eth_abi -from .cry import keccak256 +:meta hide-value: +.. versionchanged:: 2.0.0 + Removed not required members which are not produced by solidity compiler + by default, namely ``constant`` and ``payable``. + All non-standard parameters are silently discarded now. +""" -MUTABILITY = Schema(Any('pure', 'view', 'constant', 'payable', 'nonpayable')) +class FunctionT(TypedDict): + """Type of ABI function dictionary representation. -FUNC_PARAMETER = Schema({ - "name": str, - "type": str, - Optional("components"): list, # if the "type" field is "tuple" or "type[]" - Optional("internalType"): str - }, - required=True -) + .. versionadded:: 2.0.0 + """ + type: Literal["function"] # noqa: A003 + """Always ``function``.""" + name: str + """Function name.""" + stateMutability: StateMutabilityT # noqa: N815 + r"""Mutability (pure, view, payable or nonpayable).""" + inputs: Sequence["FuncParameterT"] + """Function parameters.""" + outputs: Sequence["FuncParameterT"] + """Function returns.""" -FUNCTION = Schema({ - "type": "function", - "name": str, - Optional("constant"): bool, - Optional("payable"): bool, + +CONSTRUCTOR: Final = Schema( + { + "type": "constructor", "stateMutability": MUTABILITY, "inputs": [FUNC_PARAMETER], - "outputs": [FUNC_PARAMETER] }, - required=True + required=True, + extra=voluptuous.REMOVE_EXTRA, ) +"""Validation :external:class:`~voluptuous.schema_builder.Schema` for ABI constructor. + +Constructor is a special function case that doesn't produce outputs and is unnamed. + +:meta hide-value: + +.. versionadded:: 2.0.0 +""" + + +class ConstructorT(TypedDict): + """Type of ABI function dictionary representation. + .. versionadded:: 2.0.0 + """ -EVENT_PARAMETER = Schema({ + type: Literal["constructor"] # noqa: A003 + """Always ``function``.""" + stateMutability: StateMutabilityT # noqa: N815 + r"""Mutability (pure, view, payable or nonpayable).""" + inputs: Sequence["FuncParameterT"] + """Constructor parameters.""" + + +EVENT_PARAMETER: Final = Schema( + { "name": str, "type": str, + voluptuous.Optional("components"): list, "indexed": bool, - Optional("internalType"): str # since 0.5.11+ + voluptuous.Optional("internalType"): str, # since 0.5.11+ }, - required=True + required=True, +) +"""Validation :external:class:`~voluptuous.schema_builder.Schema` for event parameter. + +:meta hide-value: +""" + + +@_with_doc_mro(_ParameterT) +class EventParameterT(_ParameterT): + """Type of ABI event parameter. + + .. 
versionadded:: 2.0.0 + """ + + indexed: bool + """Whether parameter is indexed.""" + internalType: NotRequired[str] # noqa: N815 + """InternalType is used for struct name aliases, may be ignored.""" + # Recursive types aren't really supported, but do partially work + # This will be expanded a few times and then replaced with Any (deeply nested) + components: NotRequired[Sequence["EventParameterT"]] # type: ignore[misc] + """Sequence of components, each must be :class:`EventParameterT`.""" + + +EVENT: Final = Schema( + { + "type": "event", + "name": str, + voluptuous.Optional("anonymous"): bool, + "inputs": [EVENT_PARAMETER], + } ) +"""Validation :external:class:`~voluptuous.schema_builder.Schema` for ABI event. + +:meta hide-value: +""" + + +class EventT(TypedDict): + """Type of ABI event dictionary representation. + + .. versionadded:: 2.0.0 + """ + + type: Literal["event"] # noqa: A003 + """Always ``event``.""" + name: str + """Event name.""" + inputs: Sequence["EventParameterT"] + """Event inputs.""" + anonymous: NotRequired[bool] + """Whether event is anonymous (does not include signature in ``topic``).""" + + +if TYPE_CHECKING: + base = NamedTuple("base", []) +else: + base = object + + +class FunctionResult(base): + """Mixin for :class:`~typing.NamedTuple` with convenience methods. + + It is returned from :meth:`Event.decode` and :meth:`Function.decode`. + + When obtained from ``decode`` method of :class:`Function` or :class:`Event`, + this class will contain decoded parameters. They can be obtained either by name + or by numeric index as from plain tuples. + + .. versionadded:: 2.0.0 + + Warning + ------- + Names of result items can slightly differ from names in definition. + See details below. + + See Also + -------- + :meth:`FunctionResult.name_to_identifier`: Details of names changing. + + :meth:`Function.decode`: for examples of items access + """ + + def to_dict(self) -> Dict[str, Any]: + """Return dictionary representation (recursively). + + Returns + ------- + Dict[str, Any] + Dictionary of form ``{name: value}`` + (all inner namedtuples are converted too) + + Note + ---- + This method reverts name changing, except empty strings. + Unnamed parameters will be still represented as ``ret_{i}``, + while python keywords are restored (so ``from_`` is again ``from`` key). + """ + return { + self.name_from_identifier(k): ( + v.to_dict() + if isinstance(v, FunctionResult) + else ([v_.to_dict() for v_ in v] if isinstance(v, list) else v) + ) + for k, v in self._asdict().items() + } + + def __getattr__(self, name: str) -> NoReturn: + """Dot attribute access (if not found). + + This is needed to make mypy happy with mix of this and dynamic namedtuple. + We could use a mypy plugin to resolve names dynamically, but it is too + difficult with small benefits. Now any attribute access is allowed, + but all types are Any. If type-checking is very important, make sure to + `assert` proper types to narrow them. + """ + raise AttributeError(f"{self!r} does not have attribute '{name}'.") + + @staticmethod + def name_to_identifier(word: str, position: int = 0) -> str: + """Convert given word to valid python identifier. + + It assumes that ``word`` is a valid ``solidity`` identifier or empty string. + + The following rules apply: + + - Empty string are converted to ``f"ret_{position}"`` + - Python keyword (maybe already with underscores at the end) + gets underscore (``_``) appended + - All other words are returned unchanged. 
+ + Parameters + ---------- + word: str + Solidity identifier to make compatible. + position: int + Arbitrary integer, unique for your collection + (different for different calls). + + Returns + ------- + str + Valid python identifier. + + Raises + ------ + ValueError + If given string is not a valid solidity identifier. + Examples + -------- + >>> FunctionResult.name_to_identifier('foo') + 'foo' -EVENT = Schema({ - "type": "event", - "name": str, - Optional("anonymous"): bool, - "inputs": [EVENT_PARAMETER] -}) + >>> FunctionResult.name_to_identifier('') + 'ret_0' + >>> FunctionResult.name_to_identifier('', 1) + 'ret_1' -def is_dynamic_type(t: str): - ''' Check if the input type is dynamic ''' - if t == 'bytes' or t == 'string' or t.endswith('[]'): - return True - else: - return False + >>> FunctionResult.name_to_identifier('for') + 'for_' + >>> FunctionResult.name_to_identifier('from_') + 'from__' -def dynamic_type_to_topic(t_type:str, value): - if t_type == 'string': - return keccak256([value.encode('utf-8')])[0] - elif t_type == 'bytes': - return keccak256([value])[0] - else: - raise ValueError('complex value type {} is not supported yet, open an issue on Github.'.format(t_type)) + >>> FunctionResult.name_to_identifier('1f') + Traceback (most recent call last): + ValueError: Invalid identifier given: '1f' + """ + if not word: + return f"ret_{position}" + if not word.isidentifier(): + raise ValueError(f"Invalid identifier given: '{word}'") -def calc_function_selector(abi_json: dict) -> bytes: - ''' Calculate the function selector (4 bytes) from the abi json ''' + if iskeyword(word.rstrip("_")): + return f"{word}_" + return word + + @staticmethod + def name_from_identifier(word: str) -> str: + r"""Reverse conversion to valid python identifier. + + It assumes that ``word`` was a result of + :meth:`FunctionResult.name_to_identifier`. + + The following rules apply: + + - Word that are of form ``keyword(_)+`` (with at least one + underscore ``_`` at the end) lose one underscore + - All other words are returned unchanged. + + Parameters + ---------- + word: str + Identifier to reverse. + + Returns + ------- + str + Valid solidity identifier. 
+ + Examples + -------- + >>> FunctionResult.name_from_identifier('foo') + 'foo' + + >>> FunctionResult.name_from_identifier('ret_0') + 'ret_0' + + >>> FunctionResult.name_from_identifier('for_') + 'for' + + >>> FunctionResult.name_from_identifier('from__') + 'from_' + """ + if word.endswith("_") and iskeyword(word.rstrip("_")): + return word[:-1] + return word + + +def calc_function_selector(abi_json: FunctionT) -> bytes: + """Calculate the function selector (4 bytes) from the ABI json.""" f = FUNCTION(abi_json) return eth_utils.function_abi_to_4byte_selector(f) -def calc_event_topic(abi_json: dict) -> bytes: - ''' Calculate the event log topic (32 bytes) from the abi json''' +def calc_event_topic(abi_json: EventT) -> bytes: + """Calculate the event log topic (32 bytes) from the ABI json.""" e = EVENT(abi_json) return eth_utils.event_abi_to_log_topic(e) -class Coder(): +class Coder: + """Convenient wrapper to namespace encoding functions.""" + @staticmethod - def encode_list(types: List[str], values) -> bytes: - ''' Encode a sequence of values, into a single bytes ''' + def encode_list(types: Sequence[str], values: Sequence[Any]) -> bytes: + """Encode a sequence of values, into a single bytes.""" return eth_abi.encode_abi(types, values) @staticmethod - def decode_list(types: List[str], data: bytes) -> List: - ''' Decode the data, back to a (,,,) tuple ''' + def decode_list(types: Sequence[str], data: bytes) -> List[Any]: + """Decode the data, back to a ``(...)`` tuple.""" return list(eth_abi.decode_abi(types, data)) - + @staticmethod - def encode_single(t: str, value) -> bytes: - ''' Encode value of type t into single bytes''' + def encode_single(t: str, value: Any) -> bytes: + """Encode value of type ``t`` into single bytes.""" return Coder.encode_list([t], [value]) @staticmethod - def decode_single(t: str, data): - ''' Decode data of type t back to a single object''' + def decode_single(t: str, data: bytes) -> Any: + """Decode data of type ``t`` back to a single object.""" return Coder.decode_list([t], data)[0] -class Function(): - def __init__(self, f_definition: dict): - '''Initialize a function by definition. +# The first should be right, but results in a crash. +# See https://github.com/python/mypy/issues/8320 +# _ParamT = TypeVar("_ParamT", EventParameterT, FuncParameterT) +_ParamT = TypeVar("_ParamT", bound=_ParameterT) +_BaseT = TypeVar("_BaseT") +_T = TypeVar("_T") +_Self = TypeVar("_Self", bound="Encodable[Any]") + +if sys.version_info >= (3, 9) or TYPE_CHECKING: + _PathT = Union[str, os.PathLike[str]] +else: + _PathT = Union[str, os.PathLike] + + +class _WithName: + _definition: Union[EventT, FunctionT] + + @property + def name(self) -> str: + """Get name of object. + + .. versionadded:: 2.0.0 + """ + return self._definition["name"] + + @deprecated_to_property + def get_name(self) -> str: + """Get name of object. + + .. customtox-exclude:: + + .. deprecated:: 2.0.0 + Use :attr:`name` property instead. + """ + return self.name + + +class Encodable(Generic[_ParamT], ABC): + """Base class for :class:`Function` and :class:`Event`. + + .. 
versionadded:: 2.0.0 + """ + + _definition: Union[FunctionT, ConstructorT, EventT] + + @abstractmethod + def __init__(self, definition: Any) -> None: + raise NotImplementedError() + + @abstractmethod + def encode( + self, __parameters: Sequence[Any] + ) -> Union[bytes, str, List[Optional[bytes]]]: + """Encode parameters into bytes.""" + raise NotImplementedError() + + @abstractmethod + def decode(self, __data: bytes) -> FunctionResult: + """Decode data from bytes to namedtuple.""" + raise NotImplementedError() + + @classmethod + def make_proper_type(cls, elem: _ParamT) -> str: + """Extract type string (inline tuples) from JSON.""" + return eth_utils.abi.collapse_if_tuple(dict(elem)) + + @staticmethod + def _make_output_namedtuple_type( + name: str, types: Iterable[_ParamT] + ) -> Type[FunctionResult]: + top_names = [ + FunctionResult.name_to_identifier(t["name"], i) for i, t in enumerate(types) + ] + return type(name, (namedtuple(name, top_names), FunctionResult), {}) + + @classmethod + def _demote_type(cls, typeinfo: _ParamT) -> Tuple[_ParamT, bool]: + # We don't have to support nested stuff like (uint256, bool[4])[], + # because type in JSON will be tuple[], uint256 and bool[4] in this case + # without nesting in string + type_ = typeinfo["type"] + new_type_ = re.sub(r"(\[\d*\])$", r"", type_) + if new_type_ == type_: + return typeinfo.copy(), False + + new_type = typeinfo.copy() + new_type["type"] = new_type_ + return new_type, True + + @classmethod + def apply_recursive_names( + cls, + value: Any, + typeinfo: _ParamT, + chain: Optional[Sequence[str]] = None, + ) -> Union[FunctionResult, List[FunctionResult], Any]: + """Build namedtuple from values. + + .. customtox-exclude:: + """ + if not typeinfo["type"].startswith("tuple"): + return value + + chain = [*(chain or []), typeinfo["name"].title() or "NoName"] + + new_type, demoted = cls._demote_type(typeinfo) + if demoted: + return [cls.apply_recursive_names(v, new_type, chain[:-1]) for v in value] + + components = cast(List[_ParamT], typeinfo.get("components", [])) + NewType = cls._make_output_namedtuple_type("_".join(chain), components) + return NewType( + *( + cls.apply_recursive_names(v, t, chain) + for t, v in izip(components, value) + ) + ) + + @classmethod + def _normalize_values_dict( + cls, + values: Mapping[str, Any], + expected: Union[_ParamT, Sequence[_ParamT]], + ) -> Iterator[Any]: + assert isinstance(values, Mapping) + + if isinstance(expected, Mapping): + components = expected.get("components", []) + else: + components = expected + if len(values) != len(components): + raise ValueError( + f"Invalid keys count, expected {len(components)}, got {len(values)}" + ) + + for typeinfo in components: + name = typeinfo.get("name") + if not name: + raise ValueError( + "Cannot serialize mapping when some types are unnamed." + ) + + try: + val = values[name] + except KeyError: + raise ValueError(f"Missing key for output: {name}.") + + yield cls._normalize_values(val, typeinfo) + + @overload + @classmethod + def _normalize_values( + cls, + values: Mapping[str, Any], + expected: Union[_ParamT, Sequence[_ParamT]], + ) -> Tuple[Any, ...]: + ... + + @overload + @classmethod + def _normalize_values( + cls, + values: Sequence[Any], + expected: Union[_ParamT, Sequence[_ParamT]], + ) -> Sequence[Any]: + ... 
+ + @classmethod + def _normalize_values( + cls, + values: object, + expected: Union[_ParamT, Sequence[_ParamT]], + ) -> object: + if isinstance(values, Mapping): + values = tuple(cls._normalize_values_dict(values, expected)) + + if not ( + isinstance(values, Sequence) + # Primary types + and not isinstance(values, (str, bytes, bytearray)) + ): + return values + + if isinstance(expected, Sequence): + return tuple(cls._normalize_values(v, t) for v, t in izip(values, expected)) + type_ = expected["type"] + new_type, demoted = cls._demote_type(expected) + if demoted: + return tuple(cls._normalize_values(v, new_type) for v in values) + elif "tuple" in type_: + components = cast(List[_ParamT], expected.get("components", [])) + assert components, "Missing components for tuple." + return tuple( + cls._normalize_values(v, t) for v, t in izip(values, components) + ) + else: + # Give up, maybe it is inline type like {'type': '(str,int)'} + return tuple(values) + + @classmethod + def _to_final_type( + cls, name: str, values: Iterable[Any], types: Iterable[_ParamT] + ) -> FunctionResult: + NewType = cls._make_output_namedtuple_type(name, types) + return NewType( + *( + cls.apply_recursive_names(value, typeinfo) + for typeinfo, value in izip(types, values) + ) + ) + + @classmethod + def from_solidity( + cls: Type[_Self], + *, + text: Optional[str] = None, + file: Optional[_PathT] = None, + name: Optional[str] = None, + version: Optional[str] = None, + ) -> _Self: + """Instantiate :class:`Encodable` from solidity definition. + + .. versionadded:: 2.0.0 Parameters ---------- - f_definition : dict - See FUNCTION type in this document. - ''' - self._definition = FUNCTION(f_definition) # Protect. - self.selector = calc_function_selector(f_definition) # first 4 bytes. - - def get_selector(self) -> bytes: - return self.selector - - def get_name(self) -> str: - return self._definition['name'] - - def encode(self, parameters: List, to_hex=False) -> Union[bytes, str]: - '''Encode the paramters according to the function definition. + text: str or None (keyword-only) + Program text. + file: os.PathLike or Path or None (keyword-only) + File with program source. + name: str or None + Name of encodable to extract. + version: str or None (keyword-only) + Solidity version (supported by :func:`~solcx.install_solc`) + or ``None`` to use default. + + Raises + ------ + ValueError + If required type (event or function) cannot be uniquely extracted. + :exc:`~solcx.exceptions.SolcError` + If input is not a valid solidity code. + + See Also + -------- + :external+solcx:doc:`index`: underlying library reference. + """ + + def compile_() -> Dict[str, Any]: + if file is not None: + return solcx.compile_files( + [file], output_values=["abi"], solc_version=version + ) + elif text is not None: + return solcx.compile_source( + text, output_values=["abi"], solc_version=version + ) + else: # pragma: no cover + raise TypeError("Please specify either file or text.") + + try: + result = compile_() + except solcx.exceptions.SolcNotInstalled: + solcx.install_solc(version or "latest") + result = compile_() + + all_items = [e for g in result.values() for e in g["abi"]] # Flatten + given = [e for e in all_items if e["type"] == cls.__name__.lower()] + if name is not None: + given = [e for e in given if e.get("name") == name] + + if not given: + raise ValueError("Missing value of expected type.") + elif len(given) > 1: + raise ValueError( + f"Ambiguous input: more than one {cls.__name__.lower()} given." 
+ ) + + return cls(given[0]) + + +class FunctionBase(Encodable[FuncParameterT]): + """Base class for ABI functions (function itself and constructor). + + .. versionadded:: 2.0.0 + """ + + _definition: Union[FunctionT, ConstructorT] + + def encode(self, parameters: Union[Sequence[Any], Mapping[str, Any]]) -> bytes: + r"""Encode the parameters according to the function definition. + + Parameters + ---------- + parameters : Sequence[Any] or Mapping[str, Any] + A list of parameters waiting to be encoded, + or a mapping from names to values. + + Returns + ------- + bytes + Encoded value + """ + inputs = self._definition["inputs"] + my_types = [self.make_proper_type(x) for x in inputs] + + norm_parameters = self._normalize_values(parameters, inputs) + + return self.selector + Coder.encode_list(my_types, norm_parameters) + + def decode_parameters(self, value: bytes) -> FunctionResult: + """Decode parameters back to values. + + .. versionadded:: 2.0.0 + + Parameters + ---------- + value: bytes + Data to decode. + + Returns + ------- + FunctionResult + Decoded values. + """ + my_types = [self.make_proper_type(x) for x in self._definition["inputs"]] + # Strip signature + result_list = Coder.decode_list(my_types, value[len(self.selector) :]) + + return self._to_final_type("InType", result_list, self._definition["inputs"]) + + @property + @abstractmethod + def selector(self) -> bytes: + """Selector to prepend to encoded data.""" + raise NotImplementedError() + + +class Constructor(FunctionBase): + """ABI constructor function. + + .. versionadded:: 2.0.0 + + Examples + -------- + >>> body = { + ... 'type': 'constructor', + ... 'inputs': [{'type': 'int', 'name': 'x'}], + ... 'stateMutability': 'nonpayable', + ... } + >>> Constructor(body) # doctest:+ELLIPSIS + + + Or create from contract: + + >>> contract = r'contract A { constructor(int x) {} }' + >>> Constructor.from_solidity(text=contract) # doctest:+ELLIPSIS + + """ + + def __init__(self, definition: ConstructorT) -> None: + """Initialize a constructor by definition. + + Parameters + ---------- + definition : ConstructorT + A dict with style of :const:`CONSTRUCTOR` + """ + self._definition: ConstructorT = CONSTRUCTOR(definition) # Protect. + + @property + def selector(self) -> bytes: + """Empty bytes, because constructor is unnamed.""" + return b"" + + def decode(self, data: bytes) -> NoReturn: + """Constructor does not have outputs, so nothing to decode.""" + raise AttributeError("Constructor cannot have outputs!") + + +_dummy = object() + + +class Function(_WithName, FunctionBase): + """ABI Function.""" + + def __init__(self, definition: FunctionT) -> None: + """Initialize a function by definition. + + .. versionchanged:: 2.0.0 + Argument renamed from ``f_definition`` to ``definition``. Parameters ---------- - parameters : List - A list of parameters waiting to be encoded. - to_hex : bool, optional - If the return should be '0x...' hex string, by default False + definition : FunctionT + A dict with style of :const:`FUNCTION` + """ + self._definition: FunctionT = FUNCTION(definition) # Protect. + self._selector: bytes = calc_function_selector(self._definition) + + @property + def selector(self) -> bytes: + """First 4 bytes of function signature hash. + + .. versionadded:: 2.0.0 + """ + return self._selector + + @overload + def encode( + self, parameters: Union[Sequence[Any], Mapping[str, Any]], to_hex: Literal[True] + ) -> str: + ... 
+ + @overload + def encode( + self, + parameters: Union[Sequence[Any], Mapping[str, Any]], + to_hex: Literal[False] = ..., + ) -> bytes: + ... + + def encode( + self, + parameters: Union[Sequence[Any], Mapping[str, Any]], + to_hex: object = _dummy, + ) -> Union[bytes, str]: + r"""Encode the parameters according to the function definition. + + .. versionchanged:: 2.0.0 + parameter ``to_hex`` is deprecated, use ``"0x" + result.hex()`` + directly instead. + + Parameters + ---------- + parameters : Sequence[Any] or Mapping[str, Any] + A list of parameters waiting to be encoded, + or a mapping from names to values. + to_hex : bool, default: False + If the return should be ``0x...`` hex string Returns ------- - Union[bytes, str] - Return bytes or '0x...' hex string if needed. - ''' - my_types = [x['type'] for x in self._definition['inputs']] - my_bytes = self.selector + Coder.encode_list(my_types, parameters) - if to_hex: - return '0x' + my_bytes.hex() + bytes + By default or if ``to_hex=False`` was passed. + str + If ``to_hex=True`` was passed. + + Examples + -------- + Encode sequence: + + >>> func = Function({ + ... 'inputs': [{'internalType': 'string', 'name': '', 'type': 'string'}], + ... 'outputs': [], + ... 'name': 'myFunction', + ... 'stateMutability': 'pure', + ... 'type': 'function', + ... }) + >>> enc = func.encode(['foo']) + >>> assert enc == ( + ... func.selector + ... + b'\x20'.rjust(32, b'\x00') # Address of argument + ... + b'\x03'.rjust(32, b'\x00') # Length + ... + b'foo'.ljust(32, b'\x00') # String itself + ... ) + + Encode mapping: + + >>> func = Function({ + ... 'inputs': [{'internalType': 'string', 'name': 'arg', 'type': 'string'}], + ... 'outputs': [], + ... 'name': 'myFunction', + ... 'stateMutability': 'pure', + ... 'type': 'function', + ... }) + >>> enc = func.encode({'arg': 'foo'}) + >>> assert enc == ( + ... func.selector + ... + b'\x20'.rjust(32, b'\x00') # Address of argument + ... + b'\x03'.rjust(32, b'\x00') # Length + ... + b'foo'.ljust(32, b'\x00') # String itself + ... ) + """ + my_bytes = super().encode(parameters) + + if to_hex is not _dummy: + warnings.warn( + DeprecationWarning( + "to_hex parameter is deprecated. " + "Use ``'0x' + output.hex()`` instead to replicate that behaviour" + ) + ) + if to_hex and to_hex is not _dummy: + return "0x" + my_bytes.hex() else: return my_bytes - - def decode(self, output_data: bytes) -> dict: - '''Decode function call output data back into human readable results. - The result is in dual format. Contains both position and named index. - eg. { '0': 'john', 'name': 'john' } - ''' - my_types = [x['type'] for x in self._definition['outputs']] - my_names = [x['name'] for x in self._definition['outputs']] + def decode(self, output_data: bytes) -> FunctionResult: + """Decode function call output data back into human readable results. + + The result is a dynamic subclass of + :class:`typing.NamedTuple` (:func:`collections.namedtuple` return type) + and :class:`FunctionResult` + + .. versionchanged:: 2.0.0 + Return type is not a dict anymore. + Parameters + ---------- + output_data : bytes + Data to decode. + + Returns + ------- + FunctionResult + Decoded data. + + Examples + -------- + >>> data = { + ... "inputs": [], + ... "name": "getStr", + ... "outputs": [{"name": "memory", "type": "string"}], + ... "stateMutability": "pure", + ... "type": "function", + ... } + >>> func = Function(data) + >>> memory = b"Hello world!" # encoded string + >>> binary = bytes.fromhex( + ... "20".rjust(64, "0") # address of first argument + ... 
+ hex(len(memory))[2:].rjust(64, "0") # length of string + ... + memory.hex().ljust(64, "0") # content + ... ) + >>> result = func.decode(binary) + >>> result.memory # Access by name + 'Hello world!' + + >>> result[0] # Access by index + 'Hello world!' + + >>> result.to_dict() # Convert to dictionary + {'memory': 'Hello world!'} + + With unnamed attributes: + + >>> data = { + ... "inputs": [], + ... "name": "getBool", + ... "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], + ... "stateMutability": "pure", + ... "type": "function", + ... } + >>> func = Function(data) + >>> result = func.decode(bytes.fromhex("1".rjust(64, "0"))) + >>> result.ret_0 # Access by name + True + + >>> result[0] # Access by index + True + + >>> result.to_dict() # Convert to dictionary + {'ret_0': True} + """ + outputs = self._definition["outputs"] + my_types = [self.make_proper_type(x) for x in outputs] result_list = Coder.decode_list(my_types, output_data) - r = {} - for idx, name in enumerate(my_names): - r[str(idx)] = result_list[idx] - if name: - r[name] = result_list[idx] - - return r + return self._to_final_type("OutType", result_list, self._definition["outputs"]) + def encode_outputs(self, values: Union[Sequence[Any], Mapping[str, Any]]) -> bytes: + """Encode the return values according to the function definition. -class Event(): - def __init__(self, e_definition: dict): - '''Initialize an Event with definition. + .. versionadded:: 2.0.0 Parameters ---------- - e_definition : dict - A dict with style of EVENT. - ''' - self._definition = EVENT(e_definition) - self.signature = calc_event_topic(self._definition) - - def get_name(self) -> str: - return self._definition['name'] + values : Sequence[Any] or Mapping[str, Any] + A list of parameters waiting to be encoded, + or a mapping from names to values. - def get_signature(self) -> bytes: - return self.signature + Returns + ------- + bytes + Encoded output values. + + Raises + ------ + ValueError + If mapping was given for unnamed parameters + or mapping keys are not the same as output names. + """ + outputs = self._definition["outputs"] + my_types = [self.make_proper_type(x) for x in outputs] + + return Coder.encode_list(my_types, self._normalize_values(values, outputs)) + + @classmethod + def from_solidity( + cls, + *, + text: Optional[str] = None, + file: Optional[_PathT] = None, + name: Optional[str] = None, + version: Optional[str] = None, + ) -> "Function": + """Instantiate :class:`Function` from solidity definition. + + .. versionadded:: 2.0.0 - def encode(self, params: Union[dict, List]) -> List: - '''Assemble indexed keys into topics. + Parameters + ---------- + text: str or None (keyword-only) + Program text. + file: os.PathLike or Path or None (keyword-only) + File with program source. + name: str or None + Name of function to select. Do not filter by name if ``None``. + version: str or None (keyword-only) + Solidity version (supported by :func:`~solcx.install_solc`) + or ``None`` to use default. - Usage - ----- + Raises + ------ + ValueError + If required type (event or function) cannot be uniquely extracted. + :exc:`~solcx.exceptions.SolcError` + If input is not a valid solidity code. + + See Also + -------- + :external+solcx:doc:`index`: underlying library reference. + + Examples + -------- + >>> from pprint import pprint + >>> contract = ''' + ... contract A { + ... function f(uint x) public returns(bool) {} + ... } + ... 
''' + >>> func = Function.from_solidity(text=contract) + >>> pprint(func._definition) + {'inputs': [{'internalType': 'uint256', 'name': 'x', 'type': 'uint256'}], + 'name': 'f', + 'outputs': [{'internalType': 'bool', 'name': '', 'type': 'bool'}], + 'stateMutability': 'nonpayable', + 'type': 'function'} + + No matching function: + + >>> Function.from_solidity(text='contract A { event E(int x); }') + Traceback (most recent call last): + ValueError: Missing value of expected type. + + Many matching functions: + + >>> contract = ''' + ... contract A { + ... function f1(int x) public {} + ... function f2() public {} + ... } + ... ''' + >>> Function.from_solidity(text=contract) + Traceback (most recent call last): + ValueError: Ambiguous input: more than one function given. + + Many matching functions, select by name: + + >>> contract = ''' + ... contract A { + ... function f1(int x) public {} + ... function f2() public {} + ... } + ... ''' + >>> func = Function.from_solidity(text=contract, name='f2') + >>> pprint(func._definition) + {'inputs': [], + 'name': 'f2', + 'outputs': [], + 'stateMutability': 'nonpayable', + 'type': 'function'} + + Syntax error: + + >>> Function.from_solidity( + ... text='contract A { function x() {} }' + ... ) # doctest:+IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + solcx.exceptions.SolcError: An error occurred during execution + """ + return super().from_solidity(text=text, file=file, name=name, version=version) + + @deprecated_to_property + def get_selector(self) -> bytes: + """First 4 bytes of function signature hash. + + .. customtox-exclude:: + + .. deprecated:: 2.0.0 + Use :attr:`selector` property instead. + """ + return self.selector + + +class Event(_WithName, Encodable[EventParameterT]): + """ABI Event.""" + + def __init__(self, definition: EventT) -> None: + """Initialize an Event with definition. + + .. versionchanged:: 2.0.0 + Argument renamed from ``e_definition`` to ``definition``. + + Parameters + ---------- + e_definition : EventT + A dict with style of :const:`EVENT`. - Commonly used to filter out logs of concerned topics, - eg. To filter out VIP180 transfer logs of a certain wallet, certain amount. + Raises + ------ + ValueError + If number of indexed parameters exceeds the limit. + Invalid + If given definition is malformed. + """ + self._definition: EventT = EVENT(definition) + self._signature: bytes = calc_event_topic(self._definition) + + self.indexed_params: List[EventParameterT] = [ + x for x in self._definition["inputs"] if x["indexed"] + ] + + if len(self.indexed_params) - int(self.is_anonymous) > 3: + raise ValueError("Too much indexed parameters!") + + self.unindexed_params: List[EventParameterT] = [ + x for x in self._definition["inputs"] if not x["indexed"] + ] + + @property + def is_anonymous(self) -> bool: + """Whether this event is anonymous. + + .. versionadded:: 2.0.0 + """ + return self._definition.get("anonymous", False) + + @property + def signature(self) -> bytes: + """First 4 bytes of event signature hash. + + .. versionadded:: 2.0.0 + """ + return self._signature + + @classmethod + def is_dynamic_type(cls, t: str) -> bool: + """Check if the input type requires hashing in indexed parameter. + + All bytes, strings and dynamic arrays are dynamic, plus all structs and + fixed-size arrays are hashed (see `Specification`_). 
+ """ # Reference is defined in `abi.rst` + return t in {"bytes", "string"} or "[" in t or t.startswith("tuple") + + @staticmethod + def _strip_dynamic_part(type_: str) -> str: + return type_.split("[")[0] + + @staticmethod + def pad( + data: Union[Sequence[bytes], bytes], + mod: int = 32, + to: Literal["r", "l"] = "l", + ) -> bytes: + r"""Join sequence of bytes together and pad to multiple of ``mod``. + + .. versionadded:: 2.0.0 Parameters ---------- - params : Union[dict, List] - A dict/list of indexed param of the given event, - fill in None to occupy the position, - if you aren't sure about the value. - - eg. For event: - - EventName(address from indexed, address to indexed, uint256 value) - - the params can be: - ['0xa32f..ff', '0x1f...ac'] - or: - {'from': '0xa32f..ff', 'to': '0x1f...ac'} - or: - [None, '0x1f...ac'] - or: - {'from': None, 'to': '0x1f...ac'} + data: bytes or Sequence[bytes] + Data to process. + mod: int, default: 32 + Length unit (bytes are padded to multiple of this parameter) + to: Literal["r", "l"] + Pad to left or to right. Returns ------- - List - [description] + bytes + Given sequence joined and padded to multiple of ``mod``. + + Examples + -------- + >>> Event.pad(b'foo', 32, 'l').hex() + '666f6f0000000000000000000000000000000000000000000000000000000000' + + >>> Event.pad(b'\x07', 16, 'r').hex() + '00000000000000000000000000000007' + + >>> Event.pad([b'foo', b'bar'], 32, 'l').hex() + '666f6f6261720000000000000000000000000000000000000000000000000000' + + >>> Event.pad([b'\x07', b'\x04'], 16, 'r').hex() + '00000000000000000000000000000704' + """ + if not isinstance(data, (bytes, bytearray)): + data = b"".join(data) + + length = len(data) + missing = (mod * (length // mod + 1) - length) % mod + if to == "l": + return bytes(data) + missing * b"\x00" + else: + return missing * b"\x00" + bytes(data) + + @classmethod + def dynamic_type_to_topic(cls, type_: EventParameterT, value: Any) -> List[bytes]: + """Encode single value according to given ``type_``.""" + t_type = type_["type"] + new_type, demoted = cls._demote_type(type_) + if demoted: + return [ + cls.pad(cls.dynamic_type_to_topic(new_type, v), 32, "l") for v in value + ] + + if t_type.startswith("tuple"): + return [ + cls.pad(cls.dynamic_type_to_topic(t, v), 32, "l") + for t, v in izip(type_["components"], value) + ] + + if t_type == "string": + assert isinstance(value, str), 'Value of type "string" must be str' + return [value.encode("utf-8")] + elif t_type == "bytes": + assert isinstance( + value, (bytes, bytearray) + ), 'Value of type "bytes" must be bytes' + return [value] + else: + return [Coder.encode_single(cls._strip_dynamic_part(t_type), value)] + + def encode( + self, parameters: Union[Mapping[str, Any], Sequence[Any]] + ) -> List[Optional[bytes]]: + r"""Assemble indexed keys into topics. + + Commonly used to filter out logs of concerned topics, e.g. to filter out + `VIP180 `_ + transfer logs of a certain wallet, certain amount. + + Parameters + ---------- + parameters : Mapping[str, Any] or Sequence[Any] + A dict/list of indexed parameters of the given event. + Fill in :class:`None` to occupy the position, if you aren't sure + about the value. + + Returns + ------- + List[bytes or None] + Encoded parameters with :class:`None` preserved from input. 
Raises ------ + TypeError + Unknown parameters type (neither mapping nor sequence) ValueError - [description] - ''' - topics = [] + If there is unnamed parameter in definition and dict of parameters is given, + or if parameters count doesn't match the definition. + + Examples + -------- + Let's say we have + + .. code-block:: text + + MyEvent(address from indexed, address to indexed, uint256 value) + + Then corresponding event is + + >>> event = Event({ + ... 'inputs': [ + ... {'name': 'from', 'indexed': True, 'type': 'address'}, + ... {'name': 'to', 'indexed': True, 'type': 'address'}, + ... {'name': 'value', 'indexed': False, 'type': 'uint256'}, + ... ], + ... 'name': 'MyEvent', + ... 'type': 'event', + ... }) + + We can use it to encode all topics: + + >>> address_from = '0x' + 'f' * 40 + >>> address_to = '0x' + '9' * 40 + >>> enc = event.encode([address_from, address_to]) + >>> assert tuple(enc) == ( + ... event.signature, + ... bytes.fromhex(hex(int(address_from, 16))[2:].rjust(64, '0')), + ... bytes.fromhex(hex(int(address_to, 16))[2:].rjust(64, '0')), + ... ) + + Note the interesting conversion here: ``address`` is equivalent to ``uint160``, + so one would expect just ``bytes.fromhex(address_from[2:])``, right? + Indexed event parameters are **always** padded to 32 bytes too, even if they + are shorter. Numbers are padded to the right (or as two's complement, + if negative), strings and bytes - to the left. + + Or we can convert only some of params: + + >>> enc = event.encode([address_from, None]) + >>> assert tuple(enc) == ( + ... event.signature, + ... bytes.fromhex(hex(int(address_from, 16))[2:].rjust(64, '0')), + ... None, + ... ) + + Mapping is also accepted for named parameters: + + >>> enc = event.encode({'from': address_from, 'to': None}) + >>> assert tuple(enc) == ( + ... event.signature, + ... bytes.fromhex(hex(int(address_from, 16))[2:].rjust(64, '0')), + ... None, + ... ) + + """ + topics: List[Optional[bytes]] = [] + + parameters = self._normalize_values(parameters, self.indexed_params) # not anonymous? topic[0] = signature. - if self._definition.get('anonymous', False) == False: + if not self.is_anonymous: topics.append(self.signature) - indexed_params = [x for x in self._definition['inputs'] if x['indexed']] - has_no_name_param = any([True for x in indexed_params if not x['name']]) + def encode(param: Any, definition: EventParameterT) -> bytes: + if self.is_dynamic_type(definition["type"]): + return keccak256(self.dynamic_type_to_topic(definition, param))[0] + else: + return Coder.encode_single(self.make_proper_type(definition), param) + + if ( + isinstance(parameters, Sequence) + and not isinstance(parameters, (bytes, bytearray)) + # bytes are Sequence too! + ): + for param, definition in izip(parameters, self.indexed_params): + topics.append(param if param is None else encode(param, definition)) + else: + raise TypeError( + f"Expected sequence or mapping of parameters, got: {type(parameters)}" + ) - # Check #1 - if type(params) != list and has_no_name_param: - raise ValueError('Event definition contains param without a name, use a list of params instead of dict.') + return list(topics) - # Check #2 - if type(params) == list and len(params) != len(indexed_params): - raise ValueError('Indexed params needs {} length, {} is given.'.format(len(indexed_params), len(params))) + def encode_data(self, parameters: Union[Mapping[str, Any], Sequence[Any]]) -> bytes: + """Encode unindexed parameters into bytes. 
- # Check #3 - if type(params) == dict and len(params.keys()) != len(indexed_params): - raise ValueError('Indexed params needs {} keys, {} is given.'.format(len(indexed_params), len(params.keys()))) + .. versionadded:: 2.0.0 - if type(params) == list: - for param, definition in zip(params, indexed_params): - if is_dynamic_type( definition['type'] ): - topics.append( dynamic_type_to_topic(definition['type'], param) ) - else: - topics.append( Coder.encode_single(definition['type'], param) ) + Parameters + ---------- + parameters: Mapping[str, Any] or Sequence[Any] + A dict/list of unindexed parameters of the given event. - if type(params) == dict: - for definition in indexed_params: - value = params.get(definition['name'], None) - if value is None: - topics.append(value) - continue + Returns + ------- + bytes + Encoded result. + + Examples + -------- + >>> event = Event({ + ... 'inputs': [ + ... {'name': 'from', 'indexed': True, 'type': 'address'}, + ... {'name': 'value', 'indexed': False, 'type': 'uint256'}, + ... {'name': 'to', 'indexed': True, 'type': 'address'}, + ... {'name': 'value2', 'indexed': False, 'type': 'uint64'}, + ... ], + ... 'name': 'MyEvent', + ... 'type': 'event', + ... }) + + We can use it to encode values as a sequence: + + >>> enc = event.encode_data([256, 129]) # 256 == 0x100, 129 == 0x81 + >>> assert enc.hex() == '100'.rjust(64, '0') + '81'.rjust(64, '0') + + Or as a mapping: + + >>> enc = event.encode_data({'value': 256, 'value2': 129}) + >>> assert enc.hex() == '100'.rjust(64, '0') + '81'.rjust(64, '0') + """ + parameters = self._normalize_values(parameters, self.unindexed_params) + my_types = list(map(self.make_proper_type, self.unindexed_params)) + return Coder.encode_list(my_types, parameters) + + def encode_full( + self, parameters: Union[Mapping[str, Any], Sequence[Any]] + ) -> Tuple[List[Optional[bytes]], bytes]: + r"""Encode both indexed and unindexed parameters. + + .. versionadded:: 2.0.0 - if is_dynamic_type( definition['type'] ): - topics.append( dynamic_type_to_topic(definition['type'], value) ) - else: - topics.append( Coder.encode_single(definition['type'], value) ) + Parameters + ---------- + parameters: Mapping[str, Any] or Sequence[Any] + A dict/list of all parameters of the given event. - return topics + Returns + ------- + Tuple[List[bytes or None], bytes] + Tuple + with first item being :meth:`Event.encode` result + and second item being :meth:`Event.encode_data` result. + + Raises + ------ + ValueError + If some required parameters were missing, + of some extra parameters were given. + TypeError + If given parameters are neither sequence nor mapping. + + Examples + -------- + >>> event = Event({ + ... 'inputs': [ + ... {'name': 'from', 'indexed': True, 'type': 'address'}, + ... {'name': 'value', 'indexed': False, 'type': 'uint256'}, + ... {'name': 'to', 'indexed': True, 'type': 'address'}, + ... {'name': 'value2', 'indexed': False, 'type': 'uint64'}, + ... ], + ... 'name': 'MyEvent', + ... 'type': 'event', + ... }) + >>> address_from = '0x' + 'f' * 40 + >>> address_to = '0x' + '9' * 40 + + Expected values: + + >>> topics_enc = event.encode([address_from, address_to]) + >>> data_enc = event.encode_data([256, 127]) + + Now with :meth:`Event.encode_full`: + + >>> topics, data = event.encode_full([address_from, 256, address_to, 127]) + >>> assert topics == topics_enc + >>> assert data == data_enc + + Or in mapping form (note that order doesn't matter): + + >>> topics, data = event.encode_full({ + ... 'to': address_to, + ... 'value': 256, + ... 
'value2': 127, + ... 'from': address_from, + ... }) + >>> assert topics == topics_enc + >>> assert data == data_enc + + """ + unindexed: Union[List[Any], Dict[str, Any]] + indexed: Union[List[Any], Dict[str, Any]] + + if isinstance(parameters, Mapping): + try: + unindexed = { + p["name"]: parameters[p["name"]] for p in self.unindexed_params + } + indexed = { + p["name"]: parameters[p["name"]] for p in self.indexed_params + } + except KeyError as e: + raise ValueError(f"Key '{e.args[0]}' is missing.") + if len(indexed) + len(unindexed) != len(parameters): + raise ValueError("Invalid keys count.") + elif isinstance(parameters, Sequence): + unindexed = [ + v + for v, p in izip(parameters, self._definition["inputs"]) + if not p["indexed"] + ] + indexed = [ + v + for v, p in izip(parameters, self._definition["inputs"]) + if p["indexed"] + ] + else: + raise TypeError("Sequence or mapping of parameters expected.") + return (self.encode(indexed), self.encode_data(unindexed)) - def decode(self, data: bytes, topics: List[bytes]): - ''' Decode "data" according to the "topic"s. + def decode( + self, + data: bytes, + topics: Optional[Sequence[Optional[bytes]]] = None, + ) -> FunctionResult: + r"""Decode "data" according to the "topic"s. One output can contain an array of logs. + + .. versionchanged:: 2.0.0 + Return type is not a dict anymore. + + Parameters + ---------- + data : bytes + Data to decode. + It should be ``b'\x00'`` for event without unindexed parameters. + topics : Sequence[bytes or None], optional + Sequence of topics. + Fill unknown or not important positions with :class:`None`, + it will be preserved. + + :class:`None` is interpreted like list of proper length where + all items (except signature, if needed) are :class:`None`. + + Returns + ------- + FunctionResult + Decoded data. + + Raises + ------ + ValueError + If topics count does not match the number of indexed parameters. + + Notes + ----- One log contains mainly 3 entries: - - For a non-indexed parameters event: + - For a non-indexed parameters event:: - "address": The emitting contract address. + "address": "The emitting contract address", "topics": [ "signature of event" - ] - "data": "0x..." (contains parameters value) + ], + "data": "0x..." # contains parameters values - - For an indexed parameters event: + - For an indexed parameters event:: - "address": The emitting contract address. + "address": "The emitting contract address", "topics": [ "signature of event", "indexed param 1", "indexed param 2", - ... - --> max 3 entries of indexed params. - ] - "data": "0x..." (remain un-indexed parameters value) - - If the event is "anonymous" then the signature is not inserted into the "topics" list, - hence topics[0] is not the signature. - ''' - if self._definition.get('anonymous', False) == False: + # ... + # --> max 3 entries of indexed params. + ], + "data": "0x..." # remaining unindexed parameters values + + If the event is "anonymous" then the signature is not inserted into + the "topics" list, hence ``topics[0]`` is not the signature. + + Examples + -------- + Decode indexed topic that is not hashed: + + >>> event = Event({ + ... 'inputs': [ + ... {'indexed': True, 'name': 'a1', 'type': 'bool'}, + ... ], + ... 'name': 'MyEvent', + ... 'type': 'event', + ... }) + >>> topics = [ + ... event.signature, # Not anonymous + ... b'\x01'.rjust(32, b'\x00'), # True as 32-byte integer + ... 
] + >>> data = b'\x00' # No unindexed topics + >>> event.decode(data, topics).to_dict() + {'a1': True} + + Decode mix of indexed and unindexed parameters: + + >>> event = Event({ + ... 'inputs': [ + ... {'indexed': True, 'name': 't1', 'type': 'bool'}, + ... {'indexed': True, 'name': 't2', 'type': 'bool'}, + ... {'indexed': False, 'name': 'u1', 'type': 'string'}, + ... ], + ... 'name': 'MyEvent', + ... 'type': 'event', + ... 'anonymous': True, + ... }) + >>> topics = [ + ... b'\x01'.rjust(32, b'\x00'), # True as 32-byte integer + ... b'\x00'.rjust(32, b'\x00'), # False as 32-byte integer + ... ] + >>> data = ( + ... b'' + ... + b'\x20'.rjust(32, b'\x00') # address of first argument + ... + b'\x03'.rjust(32, b'\x00') # length of b'foo' + ... + b'foo'.ljust(32, b'\x00') # b'foo' + ... ) # string 'foo' encoded + >>> event.decode(data, topics).to_dict() + {'t1': True, 't2': False, 'u1': 'foo'} + + "Decode" hashed topic: + + >>> from thor_devkit.cry import keccak256 + >>> event = Event({ + ... 'inputs': [ + ... {'indexed': True, 'name': 't1', 'type': 'string'}, + ... ], + ... 'name': 'MyEvent', + ... 'type': 'event', + ... 'anonymous': True, + ... }) + >>> encoded_topic = b'foo'.ljust(32, b'\x00') + >>> topic = keccak256([encoded_topic])[0] + >>> assert event.decode(b'\x00', [topic]).t1 == topic + + Note that we don't get a string as output due to the nature of + indexed parameters. + + See Also + -------- + :meth:`Function.decode`: for examples of result usage. + """ + indexed_count = len(self.indexed_params) + if topics is None: + topics = [None] * indexed_count + elif not self.is_anonymous: # if not anonymous, topics[0] is the signature of event. # we cut it out, because we already have self.signature - topics = topics[1:] - - _indexed_params_definitions = [x for x in self._definition['inputs'] if x['indexed']] - _un_indexed_params_definitions = [x for x in self._definition['inputs'] if not x['indexed']] - - if len(_indexed_params_definitions) != len(topics): - raise Exception('topics count invalid.') - - un_indexed_params = Coder.decode_list( - [x['type'] for x in _un_indexed_params_definitions], - data + if not topics or topics[0] not in {self.signature, None}: + raise ValueError( + "First topic of non-anonymous event must be its signature" + ) + _, *topics = topics + + # Check topics count + topics_count = len(topics) + if indexed_count != topics_count: + raise ValueError( + f"Invalid topics count: expected {indexed_count}, got {topics_count}." 
+ ) + + my_types = list(map(self.make_proper_type, self.unindexed_params)) + result_list = Coder.decode_list(my_types, data) + unindexed_params = ( + self.apply_recursive_names(value, typeinfo) + for typeinfo, value in izip(self.unindexed_params, result_list) ) - r = {} - for idx, each in enumerate(self._definition['inputs']): - to_be_stored = None - if each['indexed']: - topic = topics.pop(0) - if is_dynamic_type(each['type']): - to_be_stored = topic + inputs = self._definition["inputs"] + topics = iter(topics) + r: List[Any] = [] + for each in inputs: + if each["indexed"]: + topic = next(topics) + if self.is_dynamic_type(each["type"]) or topic is None: + r.append(topic) else: - to_be_stored = Coder.decode_single(each['type'], topic) + r.append(Coder.decode_single(each["type"], topic)) else: - to_be_stored = un_indexed_params.pop(0) + r.append(next(unindexed_params)) + + try: + next(unindexed_params) + except StopIteration: + pass + else: # pragma: no cover + raise ValueError("Wrong unindexed parameters count, internal error.") + + NewType = self._make_output_namedtuple_type("OutType", inputs) + return NewType(*r) + + @classmethod + def from_solidity( + cls, + *, + text: Optional[str] = None, + file: Optional[_PathT] = None, + name: Optional[str] = None, + version: Optional[str] = None, + ) -> "Event": + """Instantiate :class:`Event` from solidity definition. + + .. versionadded:: 2.0.0 - r[str(idx)] = to_be_stored + Parameters + ---------- + text: str or None (keyword-only) + Program text. + file: os.PathLike or Path or None (keyword-only) + File with program source. + name: str or None + Name of event to select. Do not filter by name if ``None``. + version: str or None (keyword-only) + Solidity version (supported by :func:`~solcx.install_solc`) + or ``None`` to use default. + + Raises + ------ + ValueError + If required type (event or function) cannot be uniquely extracted. + :exc:`~solcx.exceptions.SolcError` + If input is not a valid solidity code. + + See Also + -------- + :external+solcx:doc:`index`: underlying library reference. + + Examples + -------- + >>> from pprint import pprint + >>> contract = ''' + ... contract A { + ... event E(uint x) anonymous; + ... } + ... ''' + >>> ev = Event.from_solidity(text=contract) + >>> pprint(ev._definition) + {'anonymous': True, + 'inputs': [{'indexed': False, + 'internalType': 'uint256', + 'name': 'x', + 'type': 'uint256'}], + 'name': 'E', + 'type': 'event'} + + No matching events: + + >>> Event.from_solidity(text='contract A { function f(int x) public {} }') + Traceback (most recent call last): + ValueError: Missing value of expected type. + + Many matching events: + + >>> contract = ''' + ... contract A { + ... event E1(int x) anonymous; + ... event E2() ; + ... } + ... ''' + >>> Event.from_solidity(text=contract) + Traceback (most recent call last): + ValueError: Ambiguous input: more than one event given. + + Many matching events, use name: + + >>> ev = Event.from_solidity(text=contract, name='E2') + >>> pprint(ev._definition) + {'anonymous': False, 'inputs': [], 'name': 'E2', 'type': 'event'} + + Syntax error: + + >>> Event.from_solidity( + ... text='contract A { event E() {} }' + ... ) # doctest:+IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + solcx.exceptions.SolcError: An error occurred during execution + """ + return super().from_solidity(text=text, file=file, name=name, version=version) + + @deprecated_to_property + def get_signature(self) -> bytes: + """Get signature. 
- if each['name']: - r[each['name']] = to_be_stored + .. customtox-exclude:: - return r \ No newline at end of file + .. deprecated:: 2.0.0 + Use :attr:`signature` property instead + """ + return self.signature diff --git a/thor_devkit/bloom.py b/thor_devkit/bloom.py index d6c4d01..4ffeba6 100644 --- a/thor_devkit/bloom.py +++ b/thor_devkit/bloom.py @@ -1,32 +1,48 @@ -''' -Bloom Filter. +"""Bloom filter implementation. -A data structure tells us either the element definitely is not in, -or may be in the set. +Bloom filter is a probabilistic data structure that is used to check +whether the element definitely is not in set or may be in the set. -Instead of a traditional hash-based set takes up too much memory, +Instead of a traditional hash-based set, that takes up too much memory, this structure permits less memory with a tolerable false positive rate. -m = total bits of the filter. -k = how many different hash functions to use. -n = number of elements to be added to the filter. +Used variables: -2048 bits / 256 bytes -''' +:``m``: Total bits of the filter. + +:``k``: How many different hash functions to use. + +:``n``: Number of elements to be added to the filter. + +This implementation uses 2048 bits / 256 bytes of storage. +You can override it in a subclass. +""" import math -from typing import Callable -from .cry import blake2b256 +import sys +from typing import Callable, Optional + +from thor_devkit.cry import blake2b256 + +if sys.version_info < (3, 8): + from typing_extensions import Literal +else: + from typing import Literal + +__all__ = ["Bloom"] class Bloom: - MAX_K = 16 - BITS_LENGTH = 2048 + """Bloom filter.""" + + MAX_K: int = 16 + """Maximal amount of hash functions to use.""" + + BITS_LENGTH: int = 2048 + """Filter size in bits.""" @classmethod def estimate_k(cls, count: int) -> int: - ''' - Estimate the k based on the number of elements - to be inserted into bloom filter. + """Estimate the k based on expected elements count. Parameters ---------- @@ -37,33 +53,31 @@ def estimate_k(cls, count: int) -> int: ------- int The estimated k. - ''' + """ k = round(cls.BITS_LENGTH / count * math.log(2)) return max(min(k, cls.MAX_K), 1) - def __init__(self, k: int, bits: bytes = None): - ''' - Construct a bloom filter. - k is the number of different hash functions. - + def __init__(self, k: int, bits: Optional[bytes] = None): + """Construct a bloom filter. Parameters ---------- k : int The number of different hash functions to use. - bits : bytes, optional - previous bloom filter to inherit, by default None. - Leave it None to create an empty bloom filter. - ''' - self.k = k + bits : Optional[bytes], optional + Bits of previous bloom filter to inherit. + Leave it :class:`None` to create an empty bloom filter. + """ + self.k: int = k + """The number of different hash functions used.""" if bits is None: - self.bits = bytes(self.BITS_LENGTH//8) + self.bits: bytes = bytes(self.BITS_LENGTH // 8) + """Actual storage.""" else: self.bits = bits def _distribute(self, element: bytes, tester: Callable[[int, int], bool]) -> bool: - ''' - Distribute the element into the bloom filter. + """Distribute the element into the bloom filter. Parameters ---------- @@ -75,20 +89,19 @@ def _distribute(self, element: bytes, tester: Callable[[int, int], bool]) -> boo Returns ------- bool - True/False if element is inside during testing, - or True when adding element. - ''' + ``True``/``False`` if element is inside during testing, + or ``True`` when adding element. 
+        """
         h, _ = blake2b256([element])
-        for x in range(0, self.k):
+        for x in range(self.k):
             d = (h[x * 2 + 1] + (h[x * 2] << 8)) % self.BITS_LENGTH
             bit = 1 << (d % 8)
             if not tester(int(d / 8), bit):
                 return False
 
         return True
 
-    def add(self, element: bytes) -> bool:
-        '''
-        Add an element to the bloom filter.
+    def add(self, element: bytes) -> Literal[True]:
+        """Add an element to the bloom filter.
 
         Parameters
         ----------
@@ -97,20 +110,21 @@
 
         Returns
         -------
-        bool
-        True
-        '''
-        def t(index: int, bit: int):
+        Literal[True]
+            Always ``True``
+        """
+
+        def t(index: int, bit: int) -> Literal[True]:
             temp = list(self.bits)
             temp[index] = temp[index] | bit
             self.bits = bytes(temp)
             return True
 
-        return self._distribute(element, t)
+        assert self._distribute(element, t)
+        return True
 
     def test(self, element: bytes) -> bool:
-        '''
-        Test if element is inside the bloom filter.
+        """Test if element is inside the bloom filter.
 
         Parameters
         ----------
@@ -120,9 +134,39 @@
         Returns
         -------
         bool
-        True if inside, False if not inside.
-        '''
-        def t(index: int, bit: int):
+            ``True`` if inside, ``False`` if not inside.
+
+        Warning
+        -------
+        If ``False`` is returned, then the element is **definitely** not in the filter.
+
+        If ``True`` is returned, then the element **may be** in the filter; there is
+        no way to determine that for sure.
+        """
+
+        def t(index: int, bit: int) -> bool:
             return (self.bits[index] & bit) == bit
 
         return self._distribute(element, t)
+
+    def __contains__(self, element: bytes) -> bool:
+        """Test if element is inside the bloom filter.
+
+        Parameters
+        ----------
+        element : bytes
+            The element in bytes.
+
+        Returns
+        -------
+        bool
+            ``True`` if inside, ``False`` if not inside.
+
+        Warning
+        -------
+        If ``False`` is returned, then the element is **definitely** not in the filter.
+
+        If ``True`` is returned, then the element **may be** in the filter; there is
+        no way to determine that for sure.
+        """
+        return self.test(element)
diff --git a/thor_devkit/certificate.py b/thor_devkit/certificate.py
index fb9281a..16ab05c 100644
--- a/thor_devkit/certificate.py
+++ b/thor_devkit/certificate.py
@@ -1,116 +1,282 @@
-'''
-User signed certificate.
+"""User signed certificate.
 
-https://github.com/vechain/VIPs/blob/master/vips/VIP-192.md
-'''
-from typing import Optional
+Implemented according to
+`VIP192 <https://github.com/vechain/VIPs/blob/master/vips/VIP-192.md>`_
+"""
 import json
-import re
-import copy
-from .cry import blake2b256
-from .cry import secp256k1
-from .cry import address
+import sys
+from typing import Optional
+
+import voluptuous
+from voluptuous import Schema
+
+from thor_devkit.cry import blake2b256, secp256k1
+from thor_devkit.cry.address import public_key_to_address
+
+# Re-export, it was public interface
+from thor_devkit.cry.utils import safe_tolowercase as safe_tolowercase
+from thor_devkit.deprecation import renamed_function
+from thor_devkit.exceptions import BadSignature
+from thor_devkit.validation import address_type, hex_integer
+
+if sys.version_info < (3, 8):
+    from typing_extensions import Final, Literal, TypedDict
+else:
+    from typing import Final, Literal, TypedDict
+if sys.version_info < (3, 11):
+    from typing_extensions import NotRequired
+else:
+    from typing import NotRequired
+
+__all__ = [
+    # Main
+    "Certificate",
+    # Types
+    "PayloadT",
+    "CertificateT",
+    # Schemas
+    "PAYLOAD",
+    "CERTIFICATE",
+]
+
+
+PAYLOAD: Final = Schema(
+    {
+        "type": str,
+        "content": str,
+    },
+    required=True,
+)
+"""
+Validation :external:class:`~voluptuous.schema_builder.Schema` for certificate payload.
+ +:meta hide-value: + +.. versionadded:: 2.0.0 +""" + + +class PayloadT(TypedDict): + """Type of Certificate ``payload`` parameter. + + .. versionadded:: 2.0.0 + """ + + type: str # noqa: A003 + """Payload type.""" + content: str + """Payload content.""" + + +CERTIFICATE: Final = Schema( + { + "purpose": voluptuous.Any("identification", "agreement"), + "payload": PAYLOAD, + "domain": str, + "timestamp": int, + "signer": address_type(), + voluptuous.Optional("signature"): hex_integer(130), + }, + required=True, +) +""" +Validation :external:class:`~voluptuous.schema_builder.Schema` for certificate payload. + +:meta hide-value: + +.. versionadded:: 2.0.0 +""" + + +class CertificateT(TypedDict): + """Type of Certificate body dictionary. + .. versionadded:: 2.0.0 + """ + + purpose: Literal["identification", "agreement"] + """Purpose of certificate, can be ``identification`` or ``agreement``. + + Usage scenarios: + + Identification + Request user to proof that he/she is the private key holder. + + In this scenario payload is not essential to the user. + Agreement + Request user to agree with an agreement by using user's private key to sign. + + In this scenario payload should contain the content such as Privacy policy + and it is essential to the user. + + Use cases may be extended in future, see VIP192_ for details. + """ + payload: PayloadT + """Certificate payload.""" + domain: str + """Domain for which certificate was issued.""" + timestamp: int + """Issue time.""" + signer: str + """Signer address, in ``0x...`` format.""" + signature: NotRequired[str] + """ + Signature in ``0x...`` format, 65 bytes + (as from :func:`cry.secp256k1.secp256k1.sign`). + """ + + +class Certificate: + """User signed certificate.""" -class Certificate(): def __init__( self, - purpose: str, - payload: dict, + purpose: Literal["identification", "agreement"], + payload: PayloadT, domain: str, timestamp: int, signer: str, - signature: Optional[str] = None + signature: Optional[str] = None, ): - ''' - Certficate itself. + """Instantiate certificate from parameters. + + .. versionchanged:: 2.0.0 + :exc:`ValueError` not raised anymore, :exc:`~voluptuous.error.Invalid` + is used instead. Parameters ---------- purpose : str - A String. - payload : dict - Of style { "type": str, "content": str} + Certificate purpose. + payload : PayloadT + Dictionary of style { "type": str, "content": str} domain : str - A String + Certificate domain. timestamp : int - Integer, Unix timestamp. + Integer Unix timestamp. signer : str - 0x... the signer address. - signature : Optional[str], optional - A secp256k1 signed bytes, but turned into a '0x' + bytes.hex() format, by default None - ''' - if not payload.get('type'): - raise ValueError('payload needs a string field "type"') - if not payload.get('content'): - raise ValueError('payload needs a string field "content"') - - self.obj = { - 'purpose': purpose, - 'payload': payload, - 'domain': domain, - 'timestamp': timestamp, - 'signer': signer + The signer address with ``0x`` prefix. + signature : Optional[str], optional, default: None + A ``secp256k1`` signed bytes, but turned into a + ``'0x' + bytes.hex()`` format. + + Raises + ------ + :exc:`~voluptuous.error.Invalid` + When ``payload`` dictionary is malformed or parameters given are invalid. 
+ """ + # Validate + payload = PAYLOAD(payload) + + body: CertificateT = { + "purpose": purpose, + "payload": payload, + "domain": domain, + "timestamp": timestamp, + "signer": signer, } if signature: - self.obj['signature'] = signature + body["signature"] = signature - def to_dict(self): - return self.obj + # Validate and normalize + self._body: CertificateT = CERTIFICATE(body) + def to_dict(self) -> CertificateT: + """Export certificate body as dictionary.""" + return self._body.copy() -def safe_tolowercase(s: str): - if type(s) == str: - return s.lower() - else: - return s + def encode(self) -> str: + """ + Encode a certificate into json. + .. versionadded:: 2.0.0 -def encode(cert: Certificate) -> str: - ''' - Encode a certificate into json. + Returns + ------- + str + The encoded string. + """ + data = self.to_dict() + data["signer"] = safe_tolowercase(data["signer"]) + sig = data.get("signature") + if sig: + data["signature"] = safe_tolowercase(sig) + + # separators=(',', ':') -> no whitespace compact string + # sort_keys -> dict key is ordered. + return json.dumps(data, separators=(",", ":"), sort_keys=True) + + def verify(self) -> Literal[True]: + """Verify the signature of certificate. + + .. versionadded:: 2.0.0 + + Raises + ------ + BadSignature + Signature does not match. + ValueError + Signature is absent or malformed. - Parameters - ---------- - cert : Certificate - The certificate to be encoded. + Returns + ------- + Literal[True] + Always True. + """ + data = self.to_dict() - Returns - ------- - str - The encoded string. - ''' - temp = cert.to_dict() - temp['signer'] = safe_tolowercase(temp['signer']) - if temp.get('signature'): - temp['signature'] = safe_tolowercase(temp['signature']) + # remove the signature, then encode. + sig = data.pop("signature", "") + if not sig: + raise ValueError('the certificate needs a "signature" field.') - # separators=(',', ':') -> no whitespace compact string - # sort_keys -> dict key is ordered. - return json.dumps(temp, separators=(',', ':'), sort_keys=True) + the_encoded = Certificate(**data).encode() + signing_hash, _ = blake2b256([the_encoded.encode()]) + pub_key = secp256k1.recover(signing_hash, bytes.fromhex(sig[2:])) + signer = data["signer"] + if "0x" + public_key_to_address(pub_key).hex() != safe_tolowercase(signer): + raise BadSignature + return True -SIGNATURE_PATTERN = re.compile('^0x[0-9a-f]+$', re.I) + def is_valid(self) -> bool: + """Check if the signature of certificate is valid. + + .. versionadded:: 2.0.0 + + Returns + ------- + bool + Whether signature is valid. + """ + try: + return self.verify() + except (ValueError, BadSignature): + return False + + +@renamed_function("Certificate.encode") +def encode(cert: Certificate) -> str: + """Encode a certificate into json. + .. customtox-exclude:: -def verify(cert: Certificate): - temp = cert.to_dict() - if not temp.get('signature'): - raise ValueError('Cert needs a "signature" field.') + .. deprecated:: 2.0.0 + :func:`encode` module-level function is replaced by + :meth:`Certificate.encode` method to conform with OOP standards. + """ + return cert.encode() - sig = copy.copy(temp['signature']) - if len(sig) % 2 != 0: - raise ValueError('Cert "signature" field needs to be of even length.') - if not SIGNATURE_PATTERN.match(sig): - raise ValueError('Cert "signature" field can not pass the style check') +@renamed_function("Certificate.verify") +def verify(cert: Certificate) -> Literal[True]: + """Verify certificate signature. - # remove the signature, then encode. 
- del temp['signature'] - the_encoded = encode(Certificate(**temp)) - signing_hash, _ = blake2b256([the_encoded.encode('utf-8')]) - pub_key = secp256k1.recover(signing_hash, bytes.fromhex(sig[2:])) + .. customtox-exclude:: - if '0x' + address.public_key_to_address(pub_key).hex() != safe_tolowercase(temp['signer']): - raise Exception('signature does not match with the signer.') + .. deprecated:: 2.0.0 + :func:`verify` module-level function is replaced by + :meth:`Certificate.verify` method to conform with OOP standards. + """ + return cert.verify() diff --git a/thor_devkit/cry/__init__.py b/thor_devkit/cry/__init__.py index e0c2b8c..0d93899 100644 --- a/thor_devkit/cry/__init__.py +++ b/thor_devkit/cry/__init__.py @@ -1,4 +1,14 @@ +"""Common utilities for VeChain development.""" from .address import is_address, public_key_to_address, to_checksum_address from .blake2b import blake2b256 from .hdnode import HDNode from .keccak import keccak256 + +__all__ = [ + "is_address", + "public_key_to_address", + "to_checksum_address", + "blake2b256", + "keccak256", + "HDNode", +] diff --git a/thor_devkit/cry/address.py b/thor_devkit/cry/address.py index 44b76d6..62f3aa3 100644 --- a/thor_devkit/cry/address.py +++ b/thor_devkit/cry/address.py @@ -1,30 +1,32 @@ -''' -Address Module. +"""VeChain "public key" and "address" related operations and verifications.""" -VeChain "public key" and "address" related operations and verifications. -''' +from voluptuous.error import Invalid -import re -from .keccak import keccak256 -from .utils import remove_0x, is_uncompressed_public_key +from thor_devkit.cry.keccak import keccak256 +from thor_devkit.cry.utils import remove_0x, validate_uncompressed_public_key +from thor_devkit.validation import address_type + +__all__ = [ + "public_key_to_address", + "is_address", + "to_checksum_address", +] def public_key_to_address(key_bytes: bytes) -> bytes: - ''' - Derive an address from a public key - (uncompressed, starts with 0x04). + """Derive an address from a public key. Parameters ---------- key_bytes : bytes - bytes that represent a public key. + public key (uncompressed, starts with ``0x04``). Returns ------- bytes bytes that represents the address. - ''' - is_uncompressed_public_key(key_bytes) + """ + validate_uncompressed_public_key(key_bytes) # Get rid of the 0x04 (first byte) at the beginning. buffer = key_bytes[1:] # last 20 bytes from the 32 bytes hash. @@ -32,35 +34,32 @@ def public_key_to_address(key_bytes: bytes) -> bytes: def is_address(address: str) -> bool: - ''' - Check if a text string is valid address. + """Check if a text string is valid address. Parameters ---------- address : str - The address string to be checked. Should begin with '0x'. + The address string to be checked. Should begin with ``0x``. Returns ------- bool - If it is valid address. - ''' - - c = re.compile('^0x[0-9a-f]{40}$', re.I) - if c.match(address): + Whether given address is valid. + """ + try: + address_type()(address) return True - else: + except Invalid: return False def to_checksum_address(address: str) -> str: - ''' - Turn an address to a checksum address that is compatible with eip-55. + """Turn an address to a checksum address that is compatible with eip-55. Parameters ---------- address : str - The address string. Should begin with '0x'. + The address string. Should begin with ``0x``. Returns ------- @@ -70,23 +69,22 @@ def to_checksum_address(address: str) -> str: Raises ------ ValueError - If the address isn't a valid address itself. - ''' - + If the address is not valid. 
+    """
     if not is_address(address):
-        raise ValueError('The address is not valid.')
+        raise ValueError("The address is not valid.")
 
-    body = remove_0x(address)  # remove '0x'.
+    body = remove_0x(address)  # remove ``0x``.
     body = body.lower()
 
     h, _ = keccak256([body.encode("ascii")])
-    hash = h.hex()
+    hash_ = h.hex()
 
-    parts = ['0x']
+    parts = ["0x"]
     for idx, value in enumerate(body):
-        if int(hash[idx], 16) >= 8:
+        if int(hash_[idx], 16) >= 8:
             parts.append(value.upper())
         else:
             parts.append(value)
 
-    return ''.join(parts)
+    return "".join(parts)
diff --git a/thor_devkit/cry/blake2b.py b/thor_devkit/cry/blake2b.py
index a797839..1214de7 100644
--- a/thor_devkit/cry/blake2b.py
+++ b/thor_devkit/cry/blake2b.py
@@ -1,26 +1,32 @@
-'''
-Blake2b
-
-Blake2b hash function.
-'''
+"""Blake2b hash function."""
 import hashlib  # python3 lib/hashlib
-from typing import List, Tuple
+from typing import Iterable, Tuple
+
+__all__ = ["blake2b256"]
 
 
-def blake2b256(list_of_bytes: List[bytes]) -> Tuple[bytes, int]:
-    '''
-    Computes a hash in black2b flavor, the output is 256 bits / 32 bytes.
+def blake2b256(list_of_bytes: Iterable[bytes]) -> Tuple[bytes, int]:
+    """Compute a hash in blake2b flavor.
 
     Parameters
     ----------
-    list_of_bytes : List[bytes]
-        The list of bytes, waited to be hashed.
+    list_of_bytes : Iterable of bytes
+        The iterable of :class:`bytes` or :class:`bytearray`'s to be hashed.
 
     Returns
     -------
     Tuple[bytes, int]
-        Hash result in bytes and the length of bytes (32).
-    '''
+        Hash result in :class:`bytes` (32 bytes) and the length of bytes (32).
+
+    Raises
+    ------
+    TypeError
+        If argument type is wrong.
+    """
+    if isinstance(list_of_bytes, (bytes, bytearray)):  # type: ignore[unreachable]
+        raise TypeError(
+            f"Expected iterable of bytes or bytearray's, got: {type(list_of_bytes)}"
+        )
 
     m = hashlib.blake2b(digest_size=32)
     for item in list_of_bytes:
diff --git a/thor_devkit/cry/hdnode.py b/thor_devkit/cry/hdnode.py
index 1b646fb..78e9765 100644
--- a/thor_devkit/cry/hdnode.py
+++ b/thor_devkit/cry/hdnode.py
@@ -1,132 +1,202 @@
-'''
-HD nodes, HD wallets.
+"""Hierarchically deterministic wallets for VeChain.
 
-Hierarchically Deterministic Wallets for VeChain.
+Relevant information: BIP32_ and BIP44_.
 
-Relevant information: BIP32 and BIP44.
-BIP32: https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki
-BIP44: https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki
+`BIP-44 <https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki>`_ specified path notation:
 
-BIP-44 specified path notation:
-m / purpose' / coin_type' / account' / change / address_index
+.. code-block:: text
+
+    m / purpose' / coin_type' / account' / change / address_index
 
 Derive path for the VET:
-m / 44' / 818' / 0' / 0 /
-So the following is the root of the "external" node chain for VET.
+
+.. code-block:: text
+
+    m / 44' / 818' / 0' / 0 / address_index
+
+So the following is the root of the "external" node chain for VET:
+
+.. code-block:: text
+
+    m / 44' / 818' / 0' / 0
+
+``m`` is the master key, which shall be generated from a seed.
+
+The following is the "first" key pair on the "external" node chain:
 
-m / 44' / 818' / 0' / 0
+.. code-block:: text
 
-m is the master key, which shall be generated from a seed.
+    m / 44' / 818' / 0' / 0 / 0
 
-The following is the "first" key pair on the "external" node chain.
+.. _BIP32: https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki
+.. 
_BIP44: https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki +""" +import sys +from typing import Iterable, Type, TypeVar -m / 44' / 818' / 0' / 0 / 0 +from bip_utils import Base58Encoder + +try: + from bip_utils import Bip32Secp256k1 as Bip32 + + IS_OLD_BIP_UTILS = False +except ImportError: + from bip_utils import Bip32 + + IS_OLD_BIP_UTILS = True -''' -from typing import List -from .mnemonic import derive_seed -from .address import public_key_to_address -from .utils import strip_0x04 -from bip_utils import Bip32, Bip32Utils, Base58Encoder from eth_keys import KeyAPI +from thor_devkit.cry.address import public_key_to_address +from thor_devkit.cry.mnemonic import derive_seed +from thor_devkit.cry.utils import strip_0x04 + +if sys.version_info < (3, 8): + from typing_extensions import Final +else: + from typing import Final + + +__all__ = [ + "VET_EXTERNAL_PATH", + "HDNode", +] -VET_EXTERNAL_PATH = "m/44'/818'/0'/0" +# BIP-44 specified path notation: +# m / purpose' / coin_type' / account' / change / address_index -VERSION_MAINNET_PUBLIC = bytes.fromhex('0488B21E') -VERSION_MAINNET_PRIVATE = bytes.fromhex('0488ADE4') -DEPTH_MASTER_NODE = bytes.fromhex('00') -FINGER_PRINT_MASTER_KEY = bytes.fromhex('00000000') -CHILD_NUMBER_MASTER_KEY = bytes.fromhex('00000000') +VET_EXTERNAL_PATH: Final = "m/44'/818'/0'/0" +"""Prefix of path for the VET. +``address_index`` is appended to this string for derivation +""" -class HDNode(): - ''' - HD Node that is able to derive child HD Node. +VERSION_MAINNET_PUBLIC: Final = bytes.fromhex("0488B21E") +"""Version bytes for public main network.""" +VERSION_MAINNET_PRIVATE: Final = bytes.fromhex("0488ADE4") +"""Version bytes for private main network.""" +DEPTH_MASTER_NODE: Final = bytes.fromhex("00") +"""Depth for master node.""" +FINGER_PRINT_MASTER_KEY: Final = bytes.fromhex("00000000") +"""Fingerprint of a master key.""" +CHILD_NUMBER_MASTER_KEY: Final = bytes.fromhex("00000000") +"""Child number of a master key.""" +_Self = TypeVar("_Self", bound="HDNode") + + +class HDNode: + """Hierarchically deterministic (HD) node that is able to derive child HD Node. + + Note + ---- Please use static methods provided in this class to construct new instances rather than instantiate one by hand. - ''' + """ + + VERSION_MAINNET_PUBLIC: bytes = VERSION_MAINNET_PUBLIC + """Version bytes for public main network. + + .. versionadded:: 2.0.0 + """ + VERSION_MAINNET_PRIVATE: bytes = VERSION_MAINNET_PRIVATE + """Version bytes for private main network. + + .. versionadded:: 2.0.0 + """ + DEPTH_MASTER_NODE: bytes = DEPTH_MASTER_NODE + """Depth for master node. + + .. versionadded:: 2.0.0 + """ + FINGER_PRINT_MASTER_KEY: bytes = FINGER_PRINT_MASTER_KEY + """Fingerprint of a master key. + + .. versionadded:: 2.0.0 + """ + CHILD_NUMBER_MASTER_KEY: bytes = CHILD_NUMBER_MASTER_KEY + """Child number of a master key. + + .. versionadded:: 2.0.0 + """ + + def __init__(self, bip32_ctx: Bip32) -> None: + """Class constructor, it is not recommended to use this directly. - def __init__(self, bip32_ctx: Bip32): - ''' - HDNode constructor, it is not recommended to use this directly. To construct an HDNode, use staticmethods below instead. Parameters ---------- bip32_ctx : Bip32 - ''' + Context to build node from. + """ + self.bip32_ctx: Bip32 = bip32_ctx - self.bip32_ctx = bip32_ctx + @classmethod + def from_seed( + cls: Type[_Self], seed: bytes, init_path: str = VET_EXTERNAL_PATH + ) -> _Self: + """Construct an HD Node from a seed (64 bytes). 
- @staticmethod - def from_seed(seed: bytes, init_path=VET_EXTERNAL_PATH): - ''' - Construct an HD Node from a seed (64 bytes). - The init_path is m/44'/818'/0'/0 for starting. - or you can simply put in 44'/818'/0'/0 + The seed will be further developed into an "m" secret key and "chain code". - Note - ---- - The seed will be further developed into - a "m" secret key and "chain code". + .. versionchanged:: 2.0.0 + Is ``classmethod`` now. Parameters ---------- seed : bytes Seed itself. - init_path : str, optional - The derive path, by default VET_EXTERNAL_PATH + init_path : str, default: :const:`VET_EXTERNAL_PATH` + The initial derivation path Returns ------- HDNode A new HDNode. - ''' + """ bip32_ctx = Bip32.FromSeedAndPath(seed, init_path) - return HDNode(bip32_ctx) + return cls(bip32_ctx) + + @classmethod + def from_mnemonic( + cls: Type[_Self], words: Iterable[str], init_path: str = VET_EXTERNAL_PATH + ) -> _Self: + """Construct an HD Node from a mnemonic (set of words). - @staticmethod - def from_mnemonic(words: List[str], init_path=VET_EXTERNAL_PATH): - ''' - Construct an HD Node from a set of words. - The init_path is m/44'/818'/0'/0 by default on VeChain. + The words will generate a seed, which will be further developed into + an "m" secret key and "chain code". - Note - ---- - The words will generate a seed, - which will be further developed into - a "m" secret key and "chain code". + .. versionchanged:: 2.0.0 + Is ``classmethod`` now. Parameters ---------- - words : List[str] + words : Iterable of str Mnemonic words, usually 12 words. - init_path : str, optional - The initial derivation path, by default VET_EXTERNAL_PATH + init_path : str, default: :const:`VET_EXTERNAL_PATH` + The initial derivation path Returns ------- HDNode A new HDNode. - ''' - + """ seed = derive_seed(words) # 64 bytes bip32_ctx = Bip32.FromSeedAndPath(seed, init_path) - return HDNode(bip32_ctx) + return cls(bip32_ctx) + + @classmethod + def from_public_key(cls: Type[_Self], pub: bytes, chain_code: bytes) -> _Self: + """Construct an HD Node from an uncompressed public key. - @staticmethod - def from_public_key(pub: bytes, chain_code: bytes): - ''' - Construct an HD Node from an uncompressed public key. - (starts with 0x04 as first byte) + .. versionchanged:: 2.0.0 + Is ``classmethod`` now. Parameters ---------- pub : bytes - An uncompressed public key in bytes. + An uncompressed public key in bytes (starts with ``0x04`` as first byte). chain_code : bytes 32 bytes @@ -134,31 +204,34 @@ def from_public_key(pub: bytes, chain_code: bytes): ------- HDNode A new HDNode. - ''' - # parts - net_version = VERSION_MAINNET_PUBLIC - depth = DEPTH_MASTER_NODE - fprint = FINGER_PRINT_MASTER_KEY - index = CHILD_NUMBER_MASTER_KEY - chain = chain_code - key_bytes = KeyAPI.PublicKey(strip_0x04(pub)).to_compressed_bytes() - - # assemble - all_bytes = net_version + depth + fprint + index + chain + key_bytes + """ + all_bytes = b"".join( + [ + cls.VERSION_MAINNET_PUBLIC, + cls.DEPTH_MASTER_NODE, + cls.FINGER_PRINT_MASTER_KEY, + cls.CHILD_NUMBER_MASTER_KEY, + chain_code, + KeyAPI.PublicKey(strip_0x04(pub)).to_compressed_bytes(), + ] + ) + # double sha-256 checksum xpub_str = Base58Encoder.CheckEncode(all_bytes) bip32_ctx = Bip32.FromExtendedKey(xpub_str) - return HDNode(bip32_ctx) + return cls(bip32_ctx) + + @classmethod + def from_private_key(cls: Type[_Self], priv: bytes, chain_code: bytes) -> _Self: + """Construct an HD Node from a private key. 
- @staticmethod - def from_private_key(priv: bytes, chain_code: bytes): - ''' - Construct an HD Node from a private key. + .. versionchanged:: 2.0.0 + Is ``classmethod`` now. Parameters ---------- priv : bytes - The privte key in bytes. + The private key in bytes. chain_code : bytes 32 bytes of random number you choose. @@ -166,105 +239,137 @@ def from_private_key(priv: bytes, chain_code: bytes): ------- HDNode A new HDNode. - ''' - - # print('input priv', len(priv)) - # parts - net_version = VERSION_MAINNET_PRIVATE - depth = DEPTH_MASTER_NODE - fprint = FINGER_PRINT_MASTER_KEY - index = CHILD_NUMBER_MASTER_KEY - chain = chain_code - key_bytes = b'\x00' + priv - - # assemble - all_bytes = net_version + depth + fprint + index + chain + key_bytes + """ + all_bytes = b"".join( + [ + cls.VERSION_MAINNET_PRIVATE, + cls.DEPTH_MASTER_NODE, + cls.FINGER_PRINT_MASTER_KEY, + cls.CHILD_NUMBER_MASTER_KEY, + chain_code, + b"\x00" + priv, + ] + ) + # double sha-256 checksum xpriv = Base58Encoder.CheckEncode(all_bytes) bip32_ctx = Bip32.FromExtendedKey(xpriv) - return HDNode(bip32_ctx) + return cls(bip32_ctx) - def derive(self, index: int): - ''' - Derive the child HD Node from current HD Node. + def derive(self, index: int) -> "HDNode": + """Derive the child HD Node from current HD Node. - Note - ---- - private key -> private key. - private key -> public key. - public key -> public key. - public key -> private key. (CAN NOT!) + Possible derivation paths: + * private key -> private key + * private key -> public key + * public key -> public key + * public key -> private key (**impossible!**) Parameters ---------- index : int - Which key index (0,1,2... 2^32-1) to derive. + Which key index (``0 <= index < 2**32``) to derive. Returns ------- HDNode A New HDNode. - ''' - + """ bip32_ctx = self.bip32_ctx.DerivePath(str(index)) return HDNode(bip32_ctx) + @property def public_key(self) -> bytes: - ''' - Get current node's public key in uncompressed format bytes. - (starts with 0x04) + """Get current node's public key in uncompressed format bytes. + + .. versionchanged:: 2.0.0 + Regular method turned into property. Returns ------- bytes - The uncompressed public key. - ''' - return b'\x04' + self.bip32_ctx.PublicKey().RawUncompressed().ToBytes() + The uncompressed public key (starts with ``0x04``) + """ + pk = self.bip32_ctx.PublicKey().RawUncompressed().ToBytes() + return b"\x04" + strip_0x04(pk) + @property def private_key(self) -> bytes: - ''' - Get current node's private key in bytes format. - If this node was publicly derived, - then call this function may cause a Bip32KeyError exception. + """Get current node's private key in bytes format. + + .. versionchanged:: 2.0.0 + Regular method turned into property. Returns ------- bytes The private key in bytes. - ''' + + Raises + ------ + :external:exc:`~bip_utils.bip.bip32.bip32_ex.Bip32KeyError` + If node was publicly derived + """ return self.bip32_ctx.PrivateKey().Raw().ToBytes() - def chain_code(self) -> bytes: - ''' - Get the chaincode of current HD node. + if not IS_OLD_BIP_UTILS: - Returns - ------- - bytes - 32 bytes of chain code. - ''' - return self.bip32_ctx.Chain() + @property + def chain_code(self) -> bytes: + """Get the chain code of current HD node. + + .. versionchanged:: 2.0.0 + Regular method turned into property. + + Returns + ------- + bytes + 32 bytes of chain code. + """ + return self.bip32_ctx.ChainCode().ToBytes() + else: + + @property + def chain_code(self) -> bytes: + """Get the chain code of current HD node. + + .. 
versionchanged:: 2.0.0 + Regular method turned into property. + + Returns + ------- + bytes + 32 bytes of chain code. + """ + return self.bip32_ctx.Chain() + + @property def address(self) -> bytes: - ''' - Get the common address format. + """Get the common address format. + + .. versionchanged:: 2.0.0 + Regular method turned into property. Returns ------- bytes - The address in bytes. (without prefix 0x) - ''' - return public_key_to_address(self.public_key()) + The address in bytes. (without ``0x`` prefix) + """ + return public_key_to_address(self.public_key) + @property def finger_print(self) -> bytes: - ''' - Get the finger print of current HD Node public key. + """Get the finger print of current HD Node public key. + + .. versionchanged:: 2.0.0 + Regular method turned into property. Returns ------- bytes finger print in bytes. - ''' + """ return self.bip32_ctx.FingerPrint() diff --git a/thor_devkit/cry/keccak.py b/thor_devkit/cry/keccak.py index 0f42006..4967d72 100644 --- a/thor_devkit/cry/keccak.py +++ b/thor_devkit/cry/keccak.py @@ -1,27 +1,34 @@ -''' -Keccak - -Keccak hash function. -''' +"""Keccak hash function.""" +from typing import Iterable, Tuple import sha3 # pysha3 -from typing import List, Tuple + +__all__ = ["keccak256"] -def keccak256(list_of_bytes: List[bytes]) -> Tuple[bytes, int]: - ''' - Compute the sha3_256 flavor hash, outputs 256 bits / 32 bytes. +def keccak256(list_of_bytes: Iterable[bytes]) -> Tuple[bytes, int]: + """Compute the sha3_256 flavor hash. Parameters ---------- - list_of_bytes : List[bytes] + list_of_bytes : Iterable of bytes A list of bytes to be hashed. Returns ------- Tuple[bytes, int] - Hash value in bytes and length of bytes. - ''' + Hash value in :class:`bytes` (32 bytes) and length of bytes. + + Raises + ------ + TypeError + If ``bytes`` or ``bytearray`` is used instead of sequence as input. + """ + if isinstance(list_of_bytes, (bytes, bytearray)): # type: ignore[unreachable] + raise TypeError( + f"Expected sequence of bytes or bytearray's, got: {type(list_of_bytes)}" + ) + m = sha3.keccak_256() for item in list_of_bytes: m.update(item) diff --git a/thor_devkit/cry/keystore.py b/thor_devkit/cry/keystore.py index 7f4d145..ba31e5f 100644 --- a/thor_devkit/cry/keystore.py +++ b/thor_devkit/cry/keystore.py @@ -1,136 +1,347 @@ -''' -Keystore Module. +"""Key store module. -Encrypt, decrypt and verify a keystore. +Encrypt, decrypt and verify a key store. -The "keystore" dict should contain following format: +The "keystore" dict should contain following format:: -{ - address: string - crypto: object - id: string - version: number -} + { + address: string + crypto: object + id: string + version: number + } +""" +import sys +from typing import Union -''' -import re import eth_keyfile - -N = 131072 # aka. work_factor -P = 1 -R = 8 -DK_LEN = 32 -SALT_LEN = 16 - - -def encrypt(private_key: bytes, password: bytes) -> dict: - ''' - Encrypt a private key to a keystore. - The keystore is a json-style python dict. 
+import voluptuous +from voluptuous import Invalid, Schema + +from thor_devkit.deprecation import renamed_function +from thor_devkit.validation import address_type, hex_string + +if sys.version_info < (3, 8): + from typing_extensions import Final, Literal, TypedDict +else: + from typing import Final, Literal, TypedDict + +__all__ = [ + # Main + "encrypt", + "decrypt", + "validate", + "is_valid", + # Types + "AES128CTRCipherParamsT", + "PBKDF2ParamsT", + "ScryptParamsT", + "CryptoParamsT", + "KeyStoreT", + # Schemas + "AES128CTR_CIPHER_PARAMS", + "PBKDF2_PARAMS", + "SCRYPT_PARAMS", + "CRYPTO_PARAMS", + "KEYSTORE", +] + +SCRYPT_N: Final = 131072 +"""Work factor for scrypt.""" +SCRYPT_P: Final = 1 +"""Parallelism factor for scrypt.""" +SCRYPT_R: Final = 8 +"""Block size for scrypt.""" +DK_LEN: Final = 32 +"""Derived key length for scrypt.""" +SALT_LEN: Final = 16 +"""Salt length for scrypt.""" + + +AES128CTR_CIPHER_PARAMS: Final = Schema({"iv": str}, required=True) +""" +Validation :external:class:`~voluptuous.schema_builder.Schema` +for ``AES-128-CTR`` cipher parameters. + +:meta hide-value: + +.. versionadded:: 2.0.0 +""" + + +class AES128CTRCipherParamsT(TypedDict): + """Parameters for ``AES-128-CTR`` cipher. + + .. versionadded:: 2.0.0 + """ + + iv: str + """Internal parameter.""" + + +PBKDF2_PARAMS: Final = Schema( + { + "c": int, + "dklen": int, + "prf": "hmac-sha256", + "salt": hex_string(64), + }, + required=True, +) +""" +Validation :external:class:`~voluptuous.schema_builder.Schema` +for ``PBKDF2`` key derivation function parameters. + +:meta hide-value: + +.. versionadded:: 2.0.0 +""" + + +class PBKDF2ParamsT(TypedDict): + """Parameters for ``PBKDF2`` key derivation function. + + .. versionadded:: 2.0.0 + """ + + c: int + """Work factor.""" + dklen: int + """Derived key length.""" + prf: Literal["hmac-sha256"] + """Hash function to calculate HMAC.""" + salt: str + """Salt to use, hex string.""" + + +SCRYPT_PARAMS: Final = Schema( + { + "dklen": int, + "n": int, + "r": int, + "p": int, + "salt": hex_string(), + }, + required=True, +) +""" +Validation :external:class:`~voluptuous.schema_builder.Schema` +for ``scrypt`` key derivation function parameters. + +:meta hide-value: + +.. versionadded:: 2.0.0 +""" + + +class ScryptParamsT(TypedDict): + """Parameters for ``scrypt`` key derivation function. + + .. versionadded:: 2.0.0 + """ + + dklen: int + """Derived key length.""" + n: int + """Work factor.""" + r: int + """Block size.""" + p: int + """Parallelism factor.""" + salt: str + """Salt to use, hex string.""" + + +CRYPTO_PARAMS: Final = Schema( + { + "cipher": "aes-128-ctr", + "cipherparams": AES128CTR_CIPHER_PARAMS, + "ciphertext": hex_string(64), + "kdf": voluptuous.Any("scrypt", "pbkdf2"), + "kdfparams": voluptuous.Any(SCRYPT_PARAMS, PBKDF2_PARAMS), + "mac": hex_string(64), + }, + required=True, +) +""" +Validation :external:class:`~voluptuous.schema_builder.Schema` +for ``crypto`` certificate parameter. + +:meta hide-value: + +.. versionadded:: 2.0.0 +""" + + +class CryptoParamsT(TypedDict): + """Type of ``crypto`` parameter of key store. + + .. versionadded:: 2.0.0 + """ + + cipher: Literal["aes-128-ctr"] + """Cipher used. 
``aes-128-ctr`` is the only supported.""" + cipherparams: AES128CTRCipherParamsT + """Parameters of used cipher.""" + ciphertext: str + """Encoded data, 64 characters long (32 bytes).""" + kdf: Literal["pbkdf2", "scrypt"] + """Key derivation function (other are not supported).""" + kdfparams: Union[PBKDF2ParamsT, ScryptParamsT] + """Parameters of key derivation function.""" + mac: str + """MAC (checksum variant), 64 characters long (32 bytes).""" + + +KEYSTORE: Final = Schema( + { + "address": address_type(), + "id": str, + "version": 3, + "crypto": CRYPTO_PARAMS, + }, + required=True, +) +""" +Validation :external:class:`~voluptuous.schema_builder.Schema` for key store body. + +:meta hide-value: + +.. versionadded:: 2.0.0 +""" + + +class KeyStoreT(TypedDict): + """Type of key store body dictionary. + + .. versionadded:: 2.0.0 + """ + + address: str + """Address used.""" + id: str # noqa: A003 + """36 chars, ``x{8}-x{4}-x{4}-x{4}-x{12}``, ``x`` is any hex digit.""" + version: Literal[3] + """Version used. Other are not supported.""" + crypto: CryptoParamsT + """Cryptography parameters.""" + + +def encrypt(private_key: bytes, password: Union[str, bytes]) -> KeyStoreT: + """Encrypt a private key to a key store. Parameters ---------- private_key : bytes A private key in bytes. - password : bytes + password : bytes or str A password. Returns ------- - dict - A keystore - ''' - return eth_keyfile.create_keyfile_json(private_key, password, 3, "scrypt", N) + KeyStoreT + A key store json-style dictionary. + """ + return _normalize( + eth_keyfile.create_keyfile_json(private_key, password, 3, "scrypt", SCRYPT_N) + ) -def decrypt(keystore: dict, password: bytes) -> bytes: - ''' - Decrypt a keystore into a private key (bytes). +def decrypt(keystore: KeyStoreT, password: Union[str, bytes]) -> bytes: + """Decrypt a keystore into a private key (bytes). Parameters ---------- - keystore : dict - A keystore. - password : bytes + keystore : KeyStoreT + A keystore dict. + password : bytes or str A password. Returns ------- bytes A private key in bytes. - ''' - return eth_keyfile.decode_keyfile_json(keystore, password) + """ + return eth_keyfile.decode_keyfile_json(_normalize(keystore), password) -def _normalize(keystore: dict) -> dict: - ''' - Normalize the keystore key:value pairs. - Make each value in lower case. +def _normalize(keystore: KeyStoreT) -> KeyStoreT: + """Normalize the key store key:value pairs. Parameters ---------- - keystore : dict - A keystore. + keystore : KeyStoreT + A key store dict. Returns ------- - dict - A keystore. - ''' - return keystore + KeyStoreT + A key store dict (normalized). + """ + return KEYSTORE(keystore) -ADDRESS_RE = re.compile('^[0-9a-f]{40}$', re.I) +def _validate(keystore: KeyStoreT) -> Literal[True]: + """Validate the format of a key store. + Raises + ------ + :exc:`voluptuous.error.Invalid` + If data not in good shape. + """ + KEYSTORE(keystore) + return True + + +@renamed_function("validate") +def well_formed(keystore: KeyStoreT) -> Literal[True]: + """Validate if the key store is in good shape (roughly). + + .. customtox-exclude:: + + .. deprecated:: 2.0.0 + Function :func:`well_formed` is deprecated for naming consistency. + Use :func:`validate` or :func:`is_valid` instead. + """ + return _validate(keystore) -def _validate(keystore: dict) -> bool: - ''' - Validate the format of a key store. + +def validate(keystore: KeyStoreT) -> Literal[True]: + """Validate if the key store is in good shape (roughly). Parameters ---------- - keystore : dict - A keystore. 
+ keystore : KeyStoreT + A key store dict. Returns ------- - bool - True/False + Literal[True] + Always ``True`` for valid key store, raises otherwise. Raises ------ - ValueError - If is not in good shape then throw. - ''' - if keystore.get('version') != 3: - raise ValueError('unsupported version {}'.format(keystore.version)) - - if not ADDRESS_RE.match(keystore.get('address')): - raise ValueError( - 'invalid address {}, should be 40 characters and alphanumero.'.format(keystore.address)) - - if not keystore.get('id'): - raise ValueError('Need "id" field.') - - if not keystore.get('crypto'): - raise ValueError('Need "crypto" field.') - - return True + :exc:`voluptuous.error.Invalid` + If data not in good shape. + """ + # Extra "raises", because it is primary interface to private method that raises. + return _validate(keystore) -def well_formed(keystore: dict) -> bool: - ''' - Validate if the keystore is in good shape (roughly). +def is_valid(keystore: KeyStoreT) -> bool: + """Validate if the key store is in good shape (roughly). Parameters ---------- - keystore : dict - A keystore. + keystore : KeyStoreT + A key store dict. Returns ------- bool - True/False - ''' - - return _validate(keystore) + Whether key store dict is well-formed. + """ + try: + return _validate(keystore) + except Invalid: + return False diff --git a/thor_devkit/cry/mnemonic.py b/thor_devkit/cry/mnemonic.py index fbf38fd..3d41dbc 100644 --- a/thor_devkit/cry/mnemonic.py +++ b/thor_devkit/cry/mnemonic.py @@ -1,41 +1,80 @@ -''' -Mnemonic Module. +"""Mnemonic-related utilities. -Generate/Validate a words used for mnemonic wallet. +- Generate/validate a words used for mnemonic wallet. +- Derive the first private key from words. +- Derive the correct seed for BIP32_. -Derive the first private key from words. +Documentation: -Derive the correct seed for BIP32. 
-''' +- HD wallets: + `BIP32 `_ +- Mnemonic code: + `BIP39 `_ +""" + +import sys +from typing import Iterable, List, Tuple + +try: + from bip_utils import Bip32Secp256k1 as Bip32 + + IS_OLD_BIP_UTILS = False +except ImportError: + from bip_utils import Bip32 + + IS_OLD_BIP_UTILS = True -from typing import List from mnemonic import Mnemonic -from bip_utils import Bip32 -# BIP-44 specified path notation: -# m / purpose' / coin_type' / account' / change / address_index +from thor_devkit.deprecation import renamed_function + +if sys.version_info < (3, 8): + from typing_extensions import Final, Literal +else: + from typing import Final, Literal +if sys.version_info < (3, 10): + from typing_extensions import TypeAlias +else: + from typing import TypeAlias -# Derive path for the VET: -# m / 44' / 818' / 0' / 0 / -VET_PATH = "m/44'/818'/0'/0" +__all__ = [ + # Main + "generate", + "is_valid", + "derive_seed", + "derive_private_key", + # Types + "AllowedStrengthsT", + # Schemas + "ALLOWED_STRENGTHS", +] + + +AllowedStrengthsT: TypeAlias = Literal[128, 160, 192, 224, 256] +"""Allowed mnemonic strength literal type.""" + +ALLOWED_STRENGTHS: Final[Tuple[AllowedStrengthsT, ...]] = (128, 160, 192, 224, 256) +"""Allowed mnemonic strength options.""" def _get_key_path(base_path: str, index: int = 0) -> str: - return base_path.rstrip('/') + '/' + str(index) + return base_path.rstrip("/") + "/" + str(index) def _get_vet_key_path(index: int = 0) -> str: - return _get_key_path(VET_PATH, index) + # Prevent circular import + from thor_devkit.cry.hdnode import VET_EXTERNAL_PATH + + return _get_key_path(VET_EXTERNAL_PATH, index) -def generate(strength: int = 128) -> List[str]: - ''' - Generate BIP39 mnemonic words. +def generate(strength: AllowedStrengthsT = 128) -> List[str]: + """Generate BIP39_ mnemonic words. Parameters ---------- - strength : int, optional - Any of [128, 160, 192, 224, 256], by default 128 + strength : int, default: 128 + Any of [128, 160, 192, 224, 256] (:const:`ALLOWED_STRENGTHS`) Returns ------- @@ -45,74 +84,88 @@ def generate(strength: int = 128) -> List[str]: Raises ------ ValueError - If the strength is not of correct length. - ''' - if strength not in [128, 160, 192, 224, 256]: - raise ValueError( - 'strength should be one of [128, 160, 192, 224, 256].') + If the strength is not allowed. + """ + if strength not in ALLOWED_STRENGTHS: + raise ValueError(f"strength should be one of {ALLOWED_STRENGTHS}.") - sentence = Mnemonic('english').generate(strength) + sentence = Mnemonic("english").generate(strength) - return sentence.split(' ') + return sentence.split(" ") -def validate(words: List[str]) -> bool: - ''' - Check if the words form a valid BIP39 mnemonic words. +def is_valid(words: Iterable[str]) -> bool: + """Check if the words form a valid BIP39_ mnemonic words. + + .. versionadded:: 2.0.0 Parameters ---------- - words : List[str] + words : Iterable of str A list of english words. Returns ------- bool - True/False - ''' - sentence = ' '.join(words) - return Mnemonic('english').check(sentence) + Whether mnemonic is valid. + """ + sentence = " ".join(words) + return Mnemonic("english").check(sentence) + + +@renamed_function("is_valid") +def validate(words: Iterable[str]) -> bool: + """Check if the words form a valid BIP39_ mnemonic phrase. + + .. customtox-exclude:: + + .. deprecated:: 2.0.0 + Function :func:`validate` is deprecated for naming consistency. + Use :func:`is_valid` instead. There is no raising equivalent. 
+ """ + return is_valid(words) -def derive_seed(words: List[str]) -> bytes: - ''' - Derive a seed from a word list. +def derive_seed(words: Iterable[str]) -> bytes: + """Derive a seed from a word list. Parameters ---------- - words : List[str] + words : Iterable of str A list of english words. Returns ------- bytes 64 bytes - ''' - if not validate(words): + + Raises + ------ + ValueError + Seed phrase is malformed. + """ + if not is_valid(words): raise ValueError("Input words doesn't pass validation check.") - sentence = ' '.join(words) - seed = Mnemonic.to_seed(sentence) # bytes. - return seed + sentence = " ".join(words) + return Mnemonic.to_seed(sentence) # bytes. -def derive_private_key(words: List[str], index: int = 0) -> bytes: - ''' - Get a private key from the mnemonic wallet, - default to the 0 index of the deviration. (first key) +def derive_private_key(words: Iterable[str], index: int = 0) -> bytes: + """Get a private key from the mnemonic wallet. Parameters ---------- - words : List[str] + words : Iterable of str A list of english words. - index : int, optional - The private key index, first private key., by default 0 + index : int, default: 0 + The private key index, starting from zero. Returns ------- bytes - [description] - ''' + Private key. + """ seed = derive_seed(words) bip32_ctx = Bip32.FromSeedAndPath(seed, _get_vet_key_path(index)) return bip32_ctx.PrivateKey().Raw().ToBytes() diff --git a/thor_devkit/cry/secp256k1.py b/thor_devkit/cry/secp256k1.py index e6e6966..5353edd 100644 --- a/thor_devkit/cry/secp256k1.py +++ b/thor_devkit/cry/secp256k1.py @@ -1,82 +1,148 @@ -''' -secp256k1 Elliptic Curve related functions. +"""Elliptic curve ``secp256k1`` related functions. -1) Generate a private Key. -2) Derive uncompressed public key from private key. -3) Sign a message hash using the private key, generate signature. -4) Given the message hash and signature, recover the uncompressed public key. -''' +- Generate a private key. +- Derive uncompressed public key from private key. +- Sign a message hash using the private key, generate signature. +- Given the message hash and signature, recover the uncompressed public key. +""" +import sys -from ecdsa import SigningKey, SECP256k1 +import eth_keys.exceptions +from ecdsa import SECP256k1, SigningKey from eth_keys import KeyAPI +from thor_devkit.deprecation import renamed_function -MAX = bytes.fromhex( - 'fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141') -ZERO = bytes.fromhex('0' * 64) +if sys.version_info < (3, 8): + from typing_extensions import Final, Literal +else: + from typing import Final, Literal +__all__ = [ + "is_valid_private_key", + "generate_private_key", + "derive_public_key", + "sign", + "recover", +] -def _is_valid_private_key(priv_key: bytes) -> bool: - ''' - Verify if a private key is good. +MAX: Final = bytes.fromhex( + "fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141" +) +"""Maximal allowed private key.""" + +ZERO: Final = bytes(32) +"""32-bit zero in bytes form.""" + + +def validate_private_key(priv_key: bytes) -> Literal[True]: + """Validate given private key. + + .. versionadded:: 2.0.0 Returns ------- - bool - True if the private key is valid. - ''' - if priv_key == ZERO: - return False + Literal[True] + Always True. + + Raises + ------ + ValueError + If key is not valid. 
+ """ + try: + priv_key = bytes(priv_key) + except TypeError as e: + raise ValueError("Given key is not convertible to bytes.") from e + if priv_key == ZERO: + raise ValueError("Private key must not be zero.") if priv_key >= MAX: - return False - + raise ValueError("Private key must be less than MAX.") if len(priv_key) != 32: - return False - + raise ValueError("Length of private key must be equal to 32.") return True -def _is_valid_message_hash(msg_hash: bytes) -> bool: - ''' - Verify if a message hash is in correct format. - (as in terms of VeChain) +def is_valid_private_key(priv_key: bytes) -> bool: + """Verify if a private key is well-formed. + + .. versionadded:: 2.0.0 Parameters ---------- - msg_hash: bytes - The msg hash to be processed. + priv_key : bytes + Private key to check. Returns ------- bool - If the message hash is in correct format or not. - ''' - return len(msg_hash) == 32 + True if the private key is valid. + """ + try: + return validate_private_key(priv_key) + except ValueError: + return False + +def _validate_message_hash(msg_hash: bytes) -> Literal[True]: + """Verify if a message hash is in correct format (as in terms of VeChain). -def generate_privateKey() -> bytes: - ''' - Create a random number(32 bytes) as private key. + Parameters + ---------- + msg_hash : bytes + The message hash to be processed. + + Returns + ------- + bool + Whether the message hash is in correct format. + """ + if not isinstance(msg_hash, bytes): + raise ValueError("Message hash must be of type 'bytes'") + if len(msg_hash) != 32: + raise ValueError("Message hash must be 32 bytes long") + return True + + +def generate_private_key() -> bytes: + """Create a random number (32 bytes) as private key. + + .. versionadded:: 2.0.0 Returns ------- bytes The private key in 32 bytes format. - ''' + """ + # We shouldn't measure coverage here, because situation "key is invalid" + # is almost improbable while True: - _a = SigningKey.generate(curve=SECP256k1) - if _is_valid_private_key(_a.to_string()): - return _a.to_string() + _a = SigningKey.generate(curve=SECP256k1).to_string() + if is_valid_private_key(_a): # pragma: no cover + return _a + + +@renamed_function("generate_private_key") +def generate_privateKey() -> bytes: # noqa: N802 + """Create a random number (32 bytes) as private key. + + .. customtox-exclude:: + .. deprecated:: 2.0.0 + Use :func:`generate_private_key` instead for naming consistency. + """ + return generate_private_key() -def derive_publicKey(priv_key: bytes) -> bytes: - ''' - Derive public key from a private key(uncompressed). + +def derive_public_key(priv_key: bytes) -> bytes: + """Derive public key from a private key(uncompressed). + + .. versionadded:: 2.0.0 Parameters ---------- - priv_key: bytes + priv_key : bytes The private key in bytes. Returns @@ -89,24 +155,37 @@ def derive_publicKey(priv_key: bytes) -> bytes: ------ ValueError If the private key is not valid. - ''' - if not _is_valid_private_key(priv_key): - raise ValueError('Private Key not valid.') + """ + validate_private_key(priv_key) _a = SigningKey.from_string(priv_key, curve=SECP256k1) return _a.verifying_key.to_string("uncompressed") +@renamed_function("generate_public_key") +def derive_publicKey(priv_key: bytes) -> bytes: # noqa: N802 + """Create a random number (32 bytes) as public key. + + .. customtox-exclude:: + + .. deprecated:: 2.0.0 + Use :func:`derive_public_key` instead for naming consistency. 
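# A hedged end-to-end sketch for this module: key generation, public key
# derivation, signing and recovery (``sign`` and ``recover`` are defined just
# below; ``blake2b256`` is imported the same way ``thor_devkit.transaction`` does):
from thor_devkit.cry import blake2b256, secp256k1

priv = secp256k1.generate_private_key()
pub = secp256k1.derive_public_key(priv)       # 65-byte uncompressed key, 0x04-prefixed
msg_hash, _ = blake2b256([b"hello world"])    # 32-byte message hash
sig = secp256k1.sign(msg_hash, priv)          # 65 bytes: r (32) + s (32) + v (1)
assert secp256k1.recover(msg_hash, sig) == pub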
+ """ + return derive_public_key(priv_key) + + def sign(msg_hash: bytes, priv_key: bytes) -> bytes: - ''' - Sign the message hash. - (not the message itself) + """Sign the message hash. + + Note + ---- + It signs **message hash**, not the message itself! Parameters ---------- - msg_hash: bytes + msg_hash : bytes The message hash. - priv_key: bytes + priv_key : bytes The private key in bytes. Returns @@ -118,31 +197,27 @@ def sign(msg_hash: bytes, priv_key: bytes) -> bytes: ------ ValueError If the input is malformed. - ''' - if not _is_valid_message_hash(msg_hash): - raise ValueError('Message hash not valid.') - - if not _is_valid_private_key(priv_key): - raise ValueError('Private Key not valid.') + """ + _validate_message_hash(msg_hash) + validate_private_key(priv_key) sig = KeyAPI().ecdsa_sign(msg_hash, KeyAPI.PrivateKey(priv_key)) - r = sig.r.to_bytes(32, byteorder='big') - s = sig.s.to_bytes(32, byteorder='big') - v = sig.v.to_bytes(1, byteorder='big') # public key recovery bit. + r = sig.r.to_bytes(32, byteorder="big") + s = sig.s.to_bytes(32, byteorder="big") + v = sig.v.to_bytes(1, byteorder="big") # public key recovery bit. - return b''.join([r, s, v]) # 32 + 32 + 1 bytes + return b"".join([r, s, v]) # 32 + 32 + 1 bytes def recover(msg_hash: bytes, sig: bytes) -> bytes: - ''' - Recover the uncompressed public key from signature. + """Recover the uncompressed public key from signature. Parameters ---------- - msg_hash: bytes + msg_hash : bytes The message hash. - sig: bytes + sig : bytes The signature. Returns @@ -156,21 +231,16 @@ def recover(msg_hash: bytes, sig: bytes) -> bytes: If the signature is bad, or recovery bit is bad, or cannot recover(sig and msg_hash doesn't match). - ''' - - if not _is_valid_message_hash(msg_hash): - raise ValueError('Message Hash must be 32 bytes.') - - if len(sig) != 65: - raise ValueError('Signature must be 65 bytes.') + """ + _validate_message_hash(msg_hash) - if not (sig[64] == 0 or sig[64] == 1): - raise ValueError('Signature last byte must be 0 or 1') + # This validates signature + try: + signature = KeyAPI.Signature(signature_bytes=sig) + except (eth_keys.exceptions.BadSignature, eth_keys.exceptions.ValidationError) as e: + raise ValueError("Signature is invalid.") from e - pk = KeyAPI().ecdsa_recover( - msg_hash, - KeyAPI.Signature(signature_bytes=sig) - ) + pk = KeyAPI().ecdsa_recover(msg_hash, signature) # uncompressed should have first byte = 04 return bytes([4]) + pk.to_bytes() diff --git a/thor_devkit/cry/utils.py b/thor_devkit/cry/utils.py index 7f5a10f..7542b43 100644 --- a/thor_devkit/cry/utils.py +++ b/thor_devkit/cry/utils.py @@ -1,8 +1,75 @@ -''' Utils helping with hex<->string conversion and stripping ''' +"""Utils helping with ``hex <-> string`` conversion and stripping.""" +import contextlib +import sys +from functools import partial +from typing import TYPE_CHECKING, Any, Callable, Type, TypeVar, cast +from thor_devkit.deprecation import renamed_function -def strip_0x04(p: bytes): - ''' Strip the 0x04 off the starting of a byte sequence.''' +if sys.version_info < (3, 8): + from typing_extensions import Literal +else: + from typing import Literal + + +def _strict_zip(*iterables): # type: ignore[no-untyped-def] + """Implementation from `PEP618 `_.""" + if not iterables: + return + iterators = tuple(iter(iterable) for iterable in iterables) + with contextlib.suppress(StopIteration): + while True: + items = [] + for iterator in iterators: + items.append(next(iterator)) + yield tuple(items) + + if items: + i = len(items) + plural = 
" " if i == 1 else "s 1-" + msg = f"zip() argument {i+1} is shorter than argument{plural}{i}" + raise ValueError(msg) + sentinel = object() + for i, iterator in enumerate(iterators[1:], 1): + if next(iterator, sentinel) is not sentinel: + plural = " " if i == 1 else "s 1-" + msg = f"zip() argument {i+1} is longer than argument{plural}{i}" + raise ValueError(msg) + + +if TYPE_CHECKING: + # We don't have variadic generics yet (see PEP646, unsupported by mypy). + # Convince mypy that this is :func:`zip` itself. + izip = zip + r"""Implements ``python3.10+`` zip strict mode. + + In python 3.10 and higher it is an alias for ``partial(zip, strict=True)``. + + :meta hide-value: + + Parameters + ---------- + \*iterables: Iterable[Any] + Iterables to zip together. + + Yields + ------ + Tuple[Any, ...] + Tuples of values like standard :func:`zip` generates. + + Raises + ------ + ValueError + If not all iterables had equal length. + """ +elif sys.version_info < (3, 10): + izip = _strict_zip +else: + izip = partial(zip, strict=True) + + +def strip_0x04(p: bytes) -> bytes: + """Strip the ``0x04`` off the starting of a byte sequence 65 bytes long.""" if len(p) == 65 and p[0] == 4: return p[1:] else: @@ -10,51 +77,143 @@ def strip_0x04(p: bytes): def remove_0x(address: str) -> str: - ''' - Remove the 0x if any. Returns the string without 0x + """Remove the ``0x`` prefix if any. Parameters ---------- address : str - Address string, like 0xabc... + Address string, like ``0xabc``... Returns ------- str - Address string without prefix "0x" - ''' - + Address string without prefix ``0x`` + """ if address.startswith("0x") or address.startswith("0X"): return address[2:] else: return address -def is_uncompressed_public_key(key_bytes: bytes) -> bool: - ''' - Check if bytes is the uncompressed public key. +def validate_uncompressed_public_key(key_bytes: bytes) -> Literal[True]: + """Check if bytes is the uncompressed public key. Parameters ---------- - address : bytes - Address in bytes. Should be 65 bytes. + key_bytes : bytes + Address in bytes. Returns ------- - bool - True/False + Literal[True] + Always ``True`` if public key is valid, raises otherwise. Raises ------ ValueError - If address isn't 65 bytes. - ValueError - If address doesn't begin with 04 as first byte. - ''' + If address doesn't begin with ``04`` as first byte. + """ if len(key_bytes) != 65: - raise ValueError('Length should be 65 bytes.') + raise ValueError("Length should be 65 bytes.") if key_bytes[0] != 4: - raise ValueError('Should begin with 04 as first byte.') + raise ValueError("Should begin with 04 as first byte.") return True + + +def is_valid_uncompressed_public_key(key_bytes: bytes) -> bool: + """Check if bytes is the uncompressed public key. + + Parameters + ---------- + key_bytes : bytes + Address in bytes. + + Returns + ------- + bool + Whether input is uncompressed public key. + """ + try: + return validate_uncompressed_public_key(key_bytes) + except ValueError: + return False + + +@renamed_function("validate_uncompressed_public_key") +def is_uncompressed_public_key(key_bytes: bytes) -> Literal[True]: + """Check if bytes is the uncompressed public key. + + .. customtox-exclude:: + + .. deprecated:: 2.0.0 + Use :func:`is_valid_uncompressed_public_key` or + :func:`validate_uncompressed_public_key` instead. + """ + return validate_uncompressed_public_key(key_bytes) + + +_T = TypeVar("_T") + + +def safe_tolowercase(s: _T) -> _T: + """Lowercase input if it is string, return unchanged otherwise. 
+ + Parameters + ---------- + s : str or Any + Value to process. + + Returns + ------- + str or Any + Lowercase value if it is a string, value unchanged otherwise. + """ + if isinstance(s, str): + # Cast, because mypy doesn't resolve TypeVar inside function body + return cast(_T, s.lower()) + else: + return s + + +_Class = TypeVar("_Class", bound=Type[Any]) + + +def _with_doc_mro(*bases: Type[Any]) -> Callable[[_Class], _Class]: + r"""Internal function for documentation enhancement. + + Designed use case: ``sphinx.ext.autosummary`` doesn't play well + with inheritance of :class:`~typing.TypedDict`. It throws errors + for every parent-defined key. This helper (and monkey-patching module, + of course) allows to overcome this. + + Parameters + ---------- + \*bases : Type[Any] + Classes you inherit from (and their parents, optionally). + + Attributes of these (and only these) classes will be documented. + + Returns + ------- + Callable[[_Class], _Class] + Class decorator. + + Note + ---- + The reason behind that is the implementation of :class:`~typing.TypedDict`. + It does not include parents into __mro__, for every typed dict:: + + __mro__ = (, dict, object) + + This behaviour does not allow ``autodoc`` and ``autosummary`` process + members properly. We set special ``__doc_mro__`` attribute and read it + when building MRO for documentation. + """ + + def wrapper(cls: _Class) -> _Class: + cls.__doc_mro__ = (cls, *bases) + return cls + + return wrapper diff --git a/thor_devkit/deprecation.py b/thor_devkit/deprecation.py new file mode 100644 index 0000000..d3ec673 --- /dev/null +++ b/thor_devkit/deprecation.py @@ -0,0 +1,71 @@ +"""Deprecation helpers.""" +import warnings +from functools import partial, wraps +from typing import Any, Callable, Type, TypeVar, cast + +_C = TypeVar("_C", bound=Callable[..., Any]) +_T = TypeVar("_T") + + +def deprecated_to_property(func: _C) -> _C: + """Mark function as deprecated in favor of property.""" + assert not func.__doc__ or ".. deprecated::" in func.__doc__ + + @wraps(func) + def inner(*args: Any, **kwargs: Any) -> Any: + warnings.warn( + DeprecationWarning("This method is deprecated, use property instead") + ) + return func(*args, **kwargs) + + return cast(_C, inner) + + +def renamed_class(new_name: str) -> Callable[[Type[_T]], Type[_T]]: + """Mark class as renamed.""" + + def decorator(cls: Type[_T]) -> Type[_T]: + assert not cls.__doc__ or ".. deprecated::" in cls.__doc__ + + def __init__(self: _T, *args: Any, **kwargs: Any) -> None: # noqa: N807 + warnings.warn( + DeprecationWarning( + f"Class {cls.__name__} was renamed, use {new_name} instead" + ) + ) + super(cls, self).__init__(*args, **kwargs) # type: ignore + + cls.__init__ = __init__ # type: ignore + return cls + + return decorator + + +def _renamed_function(new_name: str, kind: str) -> Callable[[_C], _C]: + """Mark function or method as renamed.""" + + def decorator(func: _C) -> _C: + assert not func.__doc__ or ".. deprecated::" in func.__doc__ + + @wraps(func) + def inner(*args: Any, **kwargs: Any) -> Any: + warnings.warn( + DeprecationWarning( + f"{kind} {func.__name__} is deprecated. Use {new_name} instead." + ) + ) + return func(*args, **kwargs) + + return cast(_C, inner) + + return decorator + + +renamed_function = partial(_renamed_function, kind="Function") +renamed_method = partial(_renamed_function, kind="Method") + + +def deprecated(obj: _T) -> _T: + """Marker for anything deprecated for another reasons.""" + assert not getattr(obj, "__doc__", "") or ".. 
deprecated::" in str(obj.__doc__) + return obj diff --git a/thor_devkit/exceptions.py b/thor_devkit/exceptions.py new file mode 100644 index 0000000..a9c0021 --- /dev/null +++ b/thor_devkit/exceptions.py @@ -0,0 +1,42 @@ +"""Custom exceptions. + +:exc:`DeserializationError` and :exc:`SerializationError` are aliases for +:exc:`rlp.exception.DeserializationError` and +:exc:`rlp.exception.SerializationError` +(they aren't listed on their +`documentation page `_). +""" +from typing import Any, Optional + +from rlp.exceptions import DeserializationError as DeserializationError +from rlp.exceptions import SerializationError as SerializationError + +__all__ = [ + "DeserializationError", + "SerializationError", + "BadSignature", + "BadTransaction", +] + + +class _DefaultTextExceptionMixin(BaseException): + @property + def _message(self) -> str: + if self.__doc__: + return self.__doc__ + raise NotImplementedError + + def __init__( + self, message: Optional[str] = None, *args: Any, **kwargs: Any + ) -> None: + if message is None: + message = self._message + super().__init__(message, *args, **kwargs) + + +class BadSignature(_DefaultTextExceptionMixin, Exception): + """The signature of certificate does not match with the signer.""" + + +class BadTransaction(_DefaultTextExceptionMixin, ValueError): + """The decoded transaction is invalid.""" diff --git a/thor_devkit/rlp.py b/thor_devkit/rlp.py index 80a6d14..84c6f74 100644 --- a/thor_devkit/rlp.py +++ b/thor_devkit/rlp.py @@ -1,91 +1,106 @@ -''' -RLP Encoding/Decoding. +r"""RLP Encoding/Decoding layer for "real-world" objects.""" +import sys +import warnings +from abc import ABC, abstractmethod +from itertools import dropwhile +from typing import ( + Any, + Dict, + Generic, + Iterable, + List, + Mapping, + Optional, + Sequence, + Tuple, + TypeVar, + Union, +) -RLP encodes before storing on disk or transmitting on network. - -Primary RLP can only deal with "item" type, -The definition of item is: -1) byte string (bytes in Python) -2) list - -Some exmples are: -- b'\x00\xff' -- empty list [] -- list of bytes [ b'\x00', b'\x01\x03'] -- list of combinations [ [], b'\x00', [b'\x00']] - -The encoded result is bytes. The encoded methods is called RLP. - - RLP +-----------+ - item +-------> |RPL encoded| - +-----------+ - -But in the real world, the inputs are not pure bytes nor lists. -Some are of complex key-value pairs like dict. -Some are of "0x123" form of number. - -This module exists for some pre-defined -real world object => "item" conversion. 
- - serialize - "real world object" +--------------> item - -''' -from typing import Tuple -from typing import Union -from typing import List -from typing import Any -import re -from rlp.sedes import BigEndianInt -from rlp.exceptions import DeserializationError, SerializationError -from rlp import encode as rlp_encode from rlp import decode as rlp_decode +from rlp import encode as rlp_encode +from rlp.sedes import BigEndianInt +from voluptuous.error import Invalid + +from thor_devkit import validation +from thor_devkit.cry.utils import izip +from thor_devkit.deprecation import deprecated, renamed_class +from thor_devkit.exceptions import DeserializationError, SerializationError + +if sys.version_info < (3, 10): + from typing_extensions import TypeGuard +else: + from typing import TypeGuard + + +__all__ = [ + # Main + "ComplexCodec", + # Scalar + "BytesKind", + "NumericKind", + "BlobKind", + "FixedBlobKind", + "OptionalFixedBlobKind", + "CompactFixedBlobKind", + # Wrappers + "DictWrapper", + "ListWrapper", + "HomoListWrapper", + # Abstract + "AbstractSerializer", + "ScalarKind", + "BaseWrapper", +] + +# We lack recursive types with mypy +_PackedSequenceT = Sequence[ + Union[bytes, Sequence[Union[bytes, Sequence[Union[bytes, Sequence[Any]]]]]] +] +_T = TypeVar("_T") + + +class AbstractSerializer(Generic[_T], ABC): + """Abstract class for all serializers. + + .. versionadded:: 2.0.0 + """ + + @abstractmethod + def serialize(self, __obj: _T) -> Union[bytes, _PackedSequenceT]: + """Serialize the object into a RLP encodable "item".""" + raise NotImplementedError + + @abstractmethod + def deserialize(self, __serial: Any) -> _T: + """Deserialize given bytes into higher-level object.""" + raise NotImplementedError + + +class ScalarKind(AbstractSerializer[_T]): + """Abstract class for all scalar serializers (they accept "basic" values).""" + + @abstractmethod + def serialize(self, __obj: _T) -> bytes: + """Serialize the object into a RLP encodable "item".""" + raise NotImplementedError + + @abstractmethod + def deserialize(self, __serial: bytes) -> _T: + """Deserialize given bytes into higher-level object.""" + raise NotImplementedError + + +class BytesKind(ScalarKind[bytes]): + """Convert bytes type of Python object to RLP "item".""" - -def _is_hex_string(a: str, must_contain_data: bool) -> bool: - c = None - if must_contain_data: - c = re.compile('^0x[0-9a-f]+$', re.I) - else: - c = re.compile('^0x[0-9a-f]*$', re.I) - - if c.match(a): - return True - else: - return False - - -def _is_decimal_string(a: str) -> bool: - c = re.compile('^[0-9]+$') - if c.match(a): - return True - else: - return False - - -def _is_pure_int(a: int) -> bool: - return type(a) == int - - -def _is_pure_str(a: str) -> bool: - return type(a) == str - - -class ScalarKind(): - pass - - -class BytesKind(ScalarKind): - ''' - Convert bytes type of Python object to RLP "item". - ''' @classmethod - def is_valid_type(cls, obj): + def is_valid_type(cls, obj: object) -> TypeGuard[bytes]: + """Confirm that ``obj`` is :class:`bytes` or :class:`bytearray`.""" return isinstance(obj, (bytes, bytearray)) def serialize(self, obj: bytes) -> bytes: - ''' - Serialize the object into a RLP encode-able "item". + """Serialize the object into a RLP encodable "item". Parameters ---------- @@ -99,18 +114,18 @@ def serialize(self, obj: bytes) -> bytes: Raises ------ - SerializationError - raise if input is not bytes. - ''' + TypeError + If input is not bytes. 
+ """ if not self.is_valid_type(obj): - raise SerializationError( - 'type of "obj" param is not right, bytes required.', obj) + raise TypeError( + f'Expected parameter of type "bytes", got: {type(obj)}', obj + ) return obj def deserialize(self, serial: bytes) -> bytes: - ''' - De-serialize a RLP "item" back to bytes. + """Deserialize a RLP "item" back to bytes. Parameters ---------- @@ -124,213 +139,241 @@ def deserialize(self, serial: bytes) -> bytes: Raises ------ - DeserializationError - raise if input is not bytes. - ''' + TypeError + If input is not bytes. + """ if not self.is_valid_type(serial): - raise DeserializationError( - 'type of "serial" param is not right, bytes required.', serial) + raise TypeError( + f'Expected parameter of type "bytes", got: {type(serial)}', serial + ) return serial -class NumericKind(ScalarKind, BigEndianInt): - ''' - This is a pre-defined type for Number-like objects. +class NumericKind(BigEndianInt, ScalarKind[int]): + """Serializer for number-like objects. + + Good examples are:: - Good examples are: - '0x0', '0x123', '0', '100', 0, 0x123 + '0x0', '0x123', '0', '100', 0, 0x123, True - Bad examples are: - '0x123z', {}, '0x', -1, '0x12345678123456780', 2 ** 64 - ''' + Bad examples are:: - def __init__(self, max_bytes: int = None): - ''' - Initialize a NumericKind. + '0x123z', {}, '0x', -1, '0x12345678123456780' + + .. versionchanged:: 2.0.0 + Allowed :class:`bool` values :class:`True` and :class:`False`. + """ + + max_bytes: Optional[int] + """Maximal allowed size of number, in bytes.""" + + def __init__(self, max_bytes: Optional[int] = None) -> None: + """Initialize a NumericKind. Parameters ---------- - max_bytes : int - Max bytes in the encoded result. (not enough then prepend 0) - ''' + max_bytes : Optional[int], optional + Max bytes in the encoded result (prepend 0 if there's not enough) + """ self.max_bytes = max_bytes super().__init__(l=max_bytes) def serialize(self, obj: Union[str, int]) -> bytes: - ''' - Serialize the object into a RLP encode-able "item". + """Serialize the object into a RLP encodable "item". Parameters ---------- - obj : Union[str, int] - obj is either number in string or number int. - ''' - - if not (_is_pure_str(obj) or _is_pure_int(obj)): - raise SerializationError("The input is not str nor int.", obj) + obj : str or int + obj is either int or string representation of int parseable by :func:`int`. - number = None - - if _is_pure_str(obj): - if _is_hex_string(obj, True): - number = int(obj, 16) - - if _is_decimal_string(obj): - number = int(obj) + Returns + ------- + bytes + Serialized data - if _is_pure_int(obj): - number = obj + Raises + ------ + SerializationError + If input data is malformed + TypeError + If input is neither int nor string representation of int + """ + if isinstance(obj, str): + try: + number = int(obj, 0) + except ValueError: + raise SerializationError( + "The input string does not represent a number.", obj + ) + elif isinstance(obj, int): + number = int(obj) + else: + raise TypeError(f"expected str or int, got: {type(obj)}") - # remove leading 0 from bytes sequence. result_bytes = super().serialize(number) - byte_list = [] - can_append_flag = False - for x in result_bytes: - if not can_append_flag: - if x != 0: - can_append_flag = True - else: - continue - - if can_append_flag: - byte_list.append(x) - return bytes(byte_list) + # remove leading 0 from bytes sequence. + return bytes(dropwhile(lambda x: not x, result_bytes)) - def deserialize(self, serial) -> int: - ''' - Deserialize bytes to int. 
+ def deserialize(self, serial: bytes) -> int: + """Deserialize bytes to int. Parameters ---------- - serial : [type] + serial : bytes bytes Returns ------- int - integer + Deserialized number. Raises ------ DeserializationError If bytes contain leading 0. - ''' - if len(serial) > 0 and serial[0] == 0: + """ + if serial and not serial[0]: raise DeserializationError( - "Leading 0 should be removed from bytes", - serial + "byte string must not have leading zeroes", serial ) # add leading 0 to bytes sequence if width is set. if self.max_bytes: - byte_list = [x for x in serial] - length = len(byte_list) - missed = self.max_bytes - length - if missed: - byte_list = [0] * missed + byte_list - serial2 = bytes(byte_list) - else: - serial2 = serial - return super().deserialize(serial2) + serial = serial.rjust(self.max_bytes, b"\x00") + return super().deserialize(serial) -class BlobKind(ScalarKind): - ''' - This is a pre-defined type for '0x....' like hex strings, - which shouldn't be interpreted as a number, usually an identifier. +class BlobKind(ScalarKind[str]): + """Serializer for ``0x....`` hex strings. - like: address, block_ref, data to smart contract. - ''' + Used for strings that shouldn't be interpreted as a number, usually an identifier. + + Examples: address, block_ref, data to smart contract. + """ def serialize(self, obj: str) -> bytes: - ''' - Serialize a '0x...' string to bytes. + """Serialize a ``0x...`` string to bytes. Parameters ---------- obj : str - '0x...' style string. + ``0x...`` style string. Returns ------- bytes - the "item" that can be rlp encodeded. - ''' - if not _is_hex_string(obj, False): - raise SerializationError('expect 0x... style string', obj) - - if len(obj) % 2 != 0: - raise SerializationError( - 'expect 0x... style string of even length.', obj) - - obj2 = obj[2:] # remove '0x' + Encoded string. - return bytes.fromhex(obj2) + Raises + ------ + SerializationError + If input data is malformed. + """ + try: + return validation.hex_string(allow_prefix=True, to_bytes=True)(obj) + except Invalid as e: + raise SerializationError(str(e), obj) def deserialize(self, serial: bytes) -> str: - ''' - Deserialize bytes to '0x...' string. + """Deserialize bytes to ``0x...`` string. Parameters ---------- serial : bytes - the bytes. + Encoded string. Returns ------- str - string of style '0x...' - ''' + string of style ``0x...`` - return '0x' + serial.hex() + Raises + ------ + TypeError + If input is not ``bytes`` nor ``bytearray`` + """ + if not isinstance(serial, (bytes, bytearray)): + raise TypeError(f"expected bytes, got: {type(serial)}") + + return "0x" + serial.hex() class FixedBlobKind(BlobKind): - ''' - This is a pre-defined type for '0x....' like hex strings, - which shouldn't be interpreted as a number, usually an identifier. + """Serializer for ``0x....`` **fixed-length** hex strings. - like: address, block_ref, data to smart contract. + Used for strings that shouldn't be interpreted as a number, usually an identifier. + Examples: address, block_ref, data to smart contract. Note ---- This kind has a fixed length of bytes. (also means the input hex is fixed length) - ''' + """ + + byte_length: int + """Length of blob, in bytes.""" - def __init__(self, byte_length): + def __init__(self, byte_length: int) -> None: self.byte_length = byte_length def serialize(self, obj: str) -> bytes: - # 0x counts for 2 chars. 1 bytes = 2 hex char. - allowed_hex_length = self.byte_length * 2 + 2 + """Serialize a ``0x...`` string to bytes. 
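# A hedged round-trip sketch for the scalar kinds above; the exact byte values
# assume the leading-zero stripping / padding behaviour their docstrings describe:
from thor_devkit.rlp import BlobKind, NumericKind

num = NumericKind(8)
assert num.serialize(0x12345678) == bytes.fromhex("12345678")   # leading zeroes stripped
assert num.deserialize(bytes.fromhex("12345678")) == 0x12345678

blob = BlobKind()
assert blob.serialize("0x1234") == bytes.fromhex("1234")
assert blob.deserialize(bytes.fromhex("1234")) == "0x1234"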
- if len(obj) != allowed_hex_length: - raise SerializationError( - "Max allowed string length {}".format(allowed_hex_length), - obj - ) + Parameters + ---------- + obj : str + ``0x...`` style string. + + Returns + ------- + bytes + Encoded string. + + Raises + ------ + SerializationError + If input data is malformed (e.g. wrong length) + """ + try: + validation.hex_string(self.byte_length * 2, allow_prefix=True)(obj) + except Invalid as e: + raise SerializationError(str(e), obj) from e return super().serialize(obj) def deserialize(self, serial: bytes) -> str: + """Deserialize bytes to ``0x...`` string. + + Parameters + ---------- + serial : bytes + Encoded string. + + Returns + ------- + str + String of style ``0x...'`` + + Raises + ------ + DeserializationError + If input is malformed (e.g. wrong length) + """ if len(serial) != self.byte_length: raise DeserializationError( - "Bytes should be {} long.".format(self.byte_length), - serial + f"Bytes should be of length {self.byte_length}", serial ) return super().deserialize(serial) -class NoneableFixedBlobKind(FixedBlobKind): - ''' - This is a pre-defined type for '0x....' like hex strings, - which shouldn't be interpreted as a number, usually an identifier. +class OptionalFixedBlobKind(FixedBlobKind): + """Serializer for ``0x....`` fixed-length hex strings that may be :class:`None`. - like: address, block_ref, data to smart contract. + Used for strings that shouldn't be interpreted as a number, usually an identifier. + Examples: address, block_ref, data to smart contract. Note ---- @@ -339,275 +382,521 @@ class NoneableFixedBlobKind(FixedBlobKind): For this kind, input can be None. Then decoded is also None. - ''' + """ - def __init__(self, byte_length): - super().__init__(byte_length) + def serialize(self, obj: Optional[str] = None) -> bytes: + """Serialize a ``0x...`` string or :class:`None` to bytes. - def serialize(self, obj: str = None) -> bytes: + Parameters + ---------- + obj : Optional[str], default: None + ``0x...`` style string. + + Returns + ------- + bytes + Encoded string. + """ if obj is None: return bytes(0) return super().serialize(obj) - def deserialize(self, serial: bytes) -> str: - if len(serial) == 0: + # Unsafe override + def deserialize(self, serial: bytes) -> Optional[str]: # type: ignore[override] + """Deserialize bytes to ``0x...`` string or :class:`None`. + + Parameters + ---------- + serial : bytes + Serialized data. + + Returns + ------- + Optional[str] + String of style ``0x...`` or :class:`None` + """ + if not serial: return None return super().deserialize(serial) +@renamed_class("NoneableFixedBlobKind") +class NoneableFixedBlobKind(OptionalFixedBlobKind): + """Deprecated alias for :class:`OptionalFixedBlobKind`. + + .. deprecated:: 2.0.0 + Use :class:`OptionalFixedBlobKind` instead. + + .. customtox-exclude:: + """ + + class CompactFixedBlobKind(FixedBlobKind): - ''' - This is a pre-defined type for '0x....' like strings, - which shouldn't be interpreted as a number, usually an identifier. + """Serializer for ``0x....`` fixed-length hex strings that may start with zeros. - like: address, block_ref, data to smart contract. + Used for strings that shouldn't be interpreted as a number, usually an identifier. + Examples: address, block_ref, data to smart contract. Note ---- When encode, the result fixed length bytes will be - removed of leading zeros. i.e. 000123 -> 123 + removed of leading zeros. i.e. ``000123 -> 123`` When decode, it expects the input bytes length <= fixed_length. 
- and it pads the leading zeros back. Output '0x{0}paddingxxx...' - ''' - - def __init__(self, byte_length): - super().__init__(byte_length) + and it pads the leading zeros back. Output ``'0x{"0" * n}xxx...'`` + """ def serialize(self, obj: str) -> bytes: - b = super().serialize(obj) - first_non_zero_index = -1 - for idx, each in enumerate(b): - if each != 0: - first_non_zero_index = idx - break + """Serialize a ``0x...`` string to bytes, stripping leading zeroes. - b_list = [] - if first_non_zero_index != -1: - b_list = b[first_non_zero_index:] + Parameters + ---------- + obj : str + ``0x...`` style string. - if (len(b_list) == 0): - return bytes(0) - else: - return bytes(b_list) + Returns + ------- + bytes + Encoded string with leading zeroes removed. + """ + b = super().serialize(obj) + return bytes(dropwhile(lambda x: not x, b)) def deserialize(self, serial: bytes) -> str: - if (len(serial) > self.byte_length): + """Deserialize bytes to ``0x...`` string. + + Parameters + ---------- + serial : bytes + Encoded data. + + Returns + ------- + str + String of style ``0x...`` of fixed length + + Raises + ------ + DeserializationError + If input is malformed. + """ + if len(serial) > self.byte_length: raise DeserializationError( - "Bytes too long, only need {}".format(self.byte_length), - serial + "Bytes too long, only need {}".format(self.byte_length), serial ) - if len(serial) == 0 or serial[0] == 0: + if serial and not serial[0]: raise DeserializationError( - "No leading zeros. And byte sequence length should be > 0", - serial + "Byte sequence must have no leading zeroes", serial ) - missing = self.byte_length - len(serial) - b_list = [0] * missing + [x for x in serial] - return super().deserialize(bytes(b_list)) + padded = bytes(serial).rjust(self.byte_length, b"\x00") + return super().deserialize(padded) + + +class BaseWrapper(AbstractSerializer[_T]): + """Abstract serializer for complex types.""" + @abstractmethod + def serialize(self, __obj: _T) -> _PackedSequenceT: + """Serialize the object into a RLP encodable "item". -class BaseWrapper(): - ''' BaseWrapper is a container for complex types to be encode/decoded. ''' - pass + .. versionadded:: 2.0.0 + """ + raise NotImplementedError + @abstractmethod + def deserialize(self, __serial: _PackedSequenceT) -> _T: + """Deserialize given bytes into higher-level object. -class DictWrapper(BaseWrapper): - ''' DictWrapper is a container for parsing dict like objects. ''' + .. versionadded:: 2.0.0 + """ + raise NotImplementedError - def __init__(self, list_of_tuples: List[Tuple[str, Union[BaseWrapper, ScalarKind]]]): - '''Constructor + +class DictWrapper(BaseWrapper[Mapping[str, Any]]): + """A container for working with dict-like objects.""" + + keys: Sequence[str] + """Field names.""" + codecs: Sequence[AbstractSerializer[Any]] + """Codecs to use for each field.""" + + def __init__( + self, + codecs: Union[ + Sequence[Tuple[str, AbstractSerializer[Any]]], + Mapping[str, AbstractSerializer[Any]], + ], + ) -> None: + """Create wrapper from items. Parameters ---------- - list_of_tuples : List[Tuple[str, Union[BaseWrapper, ScalarKind]]] - A list of tuples. - eg. [(key, codec), (key, codec) ... ]) - key is a string. - codec is either a BaseWrapper, or a ScalarKind. - ''' - self.keys = [x[0] for x in list_of_tuples] - self.codecs = [x[1] for x in list_of_tuples] + codecs : Mapping[str, BaseWrapper or ScalarKind] or its ``.values()``-like list + Codecs to use. 
+ Possible values (codec is any BaseWrapper or ScalarKind): + + - Any mapping from str to codec, e.g. ``{'foo': NumericKind()}`` + - Any sequence of tuples ``(name, codec)``, + e.g. ``[('foo', NumericKind())]`` + """ + if isinstance(codecs, Mapping): + self.keys, self.codecs = izip(*codecs.items()) + else: + self.keys, self.codecs = izip(*codecs) + def __len__(self) -> int: + """Count of serializable objects.""" + return len(self.codecs) -class ListWrapper(BaseWrapper): - ''' - ListWrapper is a container for parsing a list, - the items type in the list can be heterogeneous. - ''' + def serialize(self, obj: Mapping[str, Any]) -> _PackedSequenceT: + """Serialize dictionary to sequence of serialized values. - def __init__(self, list_of_codecs: List[Union[BaseWrapper, ScalarKind]]): - '''Constructor + .. versionadded:: 2.0.0 Parameters ---------- - list_of_codecs : List[Union[BaseWrapper, ScalarKind]] + obj: Mapping[str, Any] + Dictionary to serialize. + + Returns + ------- + Sequence[bytes or Sequence[...]] (recursive) + Sequence of serialized values. + + Raises + ------ + SerializationError + If input is malformed. + """ + try: + return [ + codec.serialize(obj[key]) + for (key, codec, _) in izip(self.keys, self.codecs, obj) + ] + except KeyError as e: + raise SerializationError(f"Missing key: '{e.args[0]}'", obj) + except ValueError as e: + raise SerializationError( + f"Keys count differs: expected {len(obj)}, got {len(self)}", obj + ) from e + + def deserialize(self, serial: _PackedSequenceT) -> Dict[str, Any]: + """Deserialize sequence of encoded values to dictionary with serialized values. + + .. versionadded:: 2.0.0 + + Parameters + ---------- + obj: Sequence[bytes or Sequence[...]] (recursive) + Sequence of values to deserialize. + + Returns + ------- + Mapping[str, Any] + Deserialized values, mapping field names to decoded values. + + Raises + ------ + DeserializationError + If input is malformed. + """ + try: + return { + key: codec.deserialize(blob) + for (blob, key, codec) in izip(serial, self.keys, self.codecs) + } + except ValueError as e: + raise DeserializationError( + f"Keys count differs: expected {len(serial)}, got {len(self)}", + serial, + ) from e + + +class ListWrapper(BaseWrapper[Sequence[Any]]): + """Container for parsing a heterogeneous list. + + The items in the list can be of different types. + """ + + codecs: Sequence[AbstractSerializer[Any]] + """Codecs to use for each element of sequence.""" + + def __init__(self, codecs: Sequence[AbstractSerializer[Any]]) -> None: + """Create wrapper from items. + + Parameters + ---------- + codecs : Sequence[AbstractSerializer] A list of codecs. eg. [codec, codec, codec...] codec is either a BaseWrapper, or a ScalarKind. - ''' - self.codecs = list_of_codecs + """ + self.codecs = list(codecs) + def __len__(self) -> int: + """Count of serializable objects.""" + return len(self.codecs) -class HomoListWrapper(BaseWrapper): - ''' - HomoListWrapper is a container for parsing a list, - the items in the list are of the same type. - ''' + def serialize(self, obj: Sequence[Any]) -> _PackedSequenceT: + """Serialize sequence (list) of values to sequence of serialized values. - def __init__(self, codec: Union[BaseWrapper, ScalarKind]): - '''Constructor + .. versionadded:: 2.0.0 Parameters ---------- - list_of_codecs : List[Union[BaseWrapper, ScalarKind]] - A list of codecs. - eg. [codec, codec, codec...] + obj: Sequence[Any] + Sequence of values to serialize. 
+ + Returns + ------- + Sequence[bytes or Sequence[...]] (recursive) + Sequence of serialized values. + + Raises + ------ + SerializationError + If input is malformed. + """ + try: + return [codec.serialize(item) for (item, codec) in izip(obj, self.codecs)] + except ValueError as e: + raise SerializationError( + f"Items count differs: expected {len(obj)}, got {len(self)}", obj + ) from e + + def deserialize(self, serial: _PackedSequenceT) -> Sequence[Any]: + """Deserialize sequence of encoded values to sequence. + + .. versionadded:: 2.0.0 + + Parameters + ---------- + obj: Sequence[bytes or Sequence[...]] (recursive) + Sequence of values to deserialize. + + Returns + ------- + Sequence[Any] + Deserialized values. + + Raises + ------ + DeserializationError + If input is malformed. + """ + try: + return [ + codec.deserialize(blob) for (blob, codec) in izip(serial, self.codecs) + ] + except ValueError as e: + raise DeserializationError( + f"Items count differs: expected {len(serial)}, got {len(self)}", + serial, + ) from e + + +class HomoListWrapper(BaseWrapper[Sequence[Any]]): + """Container for parsing a homogeneous list. + + Used when the items in the list are of the same type. + """ + + codec: AbstractSerializer[Any] + """Codec to use for each element of array.""" + + def __init__(self, codec: AbstractSerializer[Any]) -> None: + """Create wrapper from items. + + Parameters + ---------- + codec : AbstractSerializer codec is either a BaseWrapper, or a ScalarKind. - ''' + """ self.codec = codec + def serialize(self, obj: Sequence[Any]) -> _PackedSequenceT: + """Serialize sequence (list) of values to sequence of serialized values. + + .. versionadded:: 2.0.0 + + Parameters + ---------- + obj: Sequence[Any] + Sequence of values to serialize. + + Returns + ------- + Sequence[bytes or Sequence[...]] (recursive) + Sequence of serialized values. + + Raises + ------ + SerializationError + If input is malformed. + """ + return [self.codec.serialize(item) for item in obj] + + def deserialize(self, serial: _PackedSequenceT) -> Sequence[Any]: + """Deserialize sequence of encoded values to sequence. + + .. versionadded:: 2.0.0 + + Parameters + ---------- + obj: Sequence[bytes or Sequence[...]] (recursive) + Sequence of values to deserialize. + + Returns + ------- + Sequence[Any] + Deserialized values. + + Raises + ------ + DeserializationError + If input is malformed. + """ + return [self.codec.deserialize(blob) for blob in serial] + + +@deprecated +def pack(obj: Any, wrapper: AbstractSerializer[Any]) -> Union[bytes, _PackedSequenceT]: + """Pack a Python object according to wrapper. + + .. deprecated:: 2.0.0 + Use ``.serialize`` directly instead. -def pack(obj, wrapper: Union[BaseWrapper, ScalarKind]) -> Union[bytes, List]: - '''Pack a Python object according to wrapper. + .. customtox-exclude:: Parameters ---------- obj : Any A dict, a list, or a string/int/any... - wrapper : Union[BaseWrapper, ScalarKind] + wrapper : AbstractSerializer[Any] A Wrapper. Returns ------- - Union[bytes, List] - Returns either the bytes if obj is a basic type, - or a list if obj is dict/list. + bytes + If obj is a basic type. + List of packed items + If obj is dict/list. Raises ------ - Exception - If the wrapper/codec is unknown. 
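# A short sketch of the wrapper just defined: ``HomoListWrapper`` applies a
# single codec to every element of a list (the byte values assume
# ``NumericKind`` strips leading zero bytes, as described earlier):
from thor_devkit.rlp import HomoListWrapper, NumericKind

wrapper = HomoListWrapper(NumericKind(4))
assert wrapper.serialize([1, 2, 3]) == [b"\x01", b"\x02", b"\x03"]
assert wrapper.deserialize([b"\x01", b"\x02", b"\x03"]) == [1, 2, 3]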
- ''' - # Simple wrapper: ScalarKind - if isinstance(wrapper, ScalarKind): - return wrapper.serialize(obj) - - # Complicated wrapper: BaseWrapper - if isinstance(wrapper, BaseWrapper): - if isinstance(wrapper, DictWrapper): - r = [] - for (key, codec) in zip(wrapper.keys, wrapper.codecs): - r.append(pack(obj[key], codec)) - return r - - if isinstance(wrapper, ListWrapper): - r = [] - for (item, codec) in zip(obj, wrapper.codecs): - r.append(pack(item, codec)) - return r - - if isinstance(wrapper, HomoListWrapper): - r = [] - for item in obj: - r.append(pack(item, wrapper.codec)) - return r - - raise Exception('codec type is unknown.') - - # Wrapper type is unknown, raise. - raise Exception('wrapper type is unknown.{}'.format(wrapper)) - - -def unpack(packed: Union[List, bytes], wrapper: Union[BaseWrapper, ScalarKind]) -> Union[dict, List, Any]: - '''Unpack a serialized thing back into a dict/list or a Python basic type. + SerializationError + If data cannot be serialized using specified codec. + TypeError + If wrapper type is unknown. + """ + warnings.warn("Function 'pack' is deprecated. Use '.serialize' instead.") + + if not isinstance(wrapper, AbstractSerializer): + raise TypeError(f"Wrapper type is unknown: {type(wrapper)}") + + return wrapper.serialize(obj) + + +@deprecated +def unpack( + packed: Union[bytes, _PackedSequenceT], + wrapper: AbstractSerializer[Any], +) -> Union[Dict[str, Any], List[Any], Any]: + """Unpack a serialized thing back into a dict/list or a Python basic type. + + .. deprecated:: 2.0.0 + Use ``.deserialize`` directly instead. + + .. customtox-exclude:: Parameters ---------- - packed : Union[List, bytes] - A list of RLP encoded or pure bytes. - wrapper : Union[BaseWrapper, ScalarKind] + packed : bytes or sequence of them + A list of RLP encoded or pure bytes (may be nested). + wrapper : AbstractSerializer[Any] The Wrapper. Returns ------- - Union[dict, List, Any] + Dict[str, Any] or List[Any] or Any dict/list if the wrapper instruction is dict/list, Python basic type if input is bytes. Raises ------ - Exception - If the wrapper/codec is unknown. - ''' - # Simple wrapper: ScalarKind - if isinstance(wrapper, ScalarKind): - return wrapper.deserialize(packed) - - # Complicated wrapper: BaseWrapper - if isinstance(wrapper, BaseWrapper): - if isinstance(wrapper, DictWrapper): - r = {} - for (blob, key, codec) in zip(packed, wrapper.keys, wrapper.codecs): - r[key] = unpack(blob, codec) - return r - - if isinstance(wrapper, ListWrapper): - r = [] - for (blob, codec) in zip(packed, wrapper.codecs): - r.append(unpack(blob, codec)) - return r - - if isinstance(wrapper, HomoListWrapper): - r = [] - for blob in packed: - r.append(unpack(blob, wrapper.codec)) - return r - - raise Exception('codec type is unknown.') - - # Wrapper type is unknown, raise. - raise Exception('wrapper type is unknown.') - - -def pretty_print(packed: Union[bytes, List], indent: int): - ''' Debug function. - Input: [] or bytes, or [bytes, [], [bytes]] - Target: Pretty print the bytes into hex. - and print indentation of grouped list brackets. - ''' - # indent of items - internalIndent = 2 + DeserializationError + If data cannot be deserialized using specified codec. + TypeError + If wrapper type is unknown. + """ + warnings.warn( + "Function 'unpack' is deprecated. Use '.deserialize' instead." 
+ ) + + if not isinstance(wrapper, AbstractSerializer): + raise TypeError("Wrapper type is unknown.") + + return wrapper.deserialize(packed) + + +def pretty_print( + packed: Union[bytes, _PackedSequenceT], indent: int = 0 +) -> None: # pragma: no cover + """Pretty print the bytes into hex, indenting nested structures. + + Parameters + ---------- + packed : bytes or sequence of them + Data to print (may be nested). + indent : int, default: 0 + Indent of topmost object, in spaces. + + Returns + ------- + None + """ + # indent of items + internal_indent = 2 # bytes? Direct print it. if isinstance(packed, (bytes, bytearray)): - if len(packed) == 0: - print(" " * (indent) + "(empty byte[])") - else: - print(" " * (indent) + packed.hex()) + print(" " * (indent) + (packed.hex() or "(empty byte[])")) return - + # list? - if isinstance(packed, list): + elif isinstance(packed, Iterable): + # mypy isn't smart enough to deduce this from first `if`-branch + assert not isinstance(packed, (bytes, bytearray)) + print(" " * (indent) + "[") for each in packed: - pretty_print(each, indent + internalIndent) + pretty_print(each, indent + internal_indent) print(" " * (indent) + "]") -class ComplexCodec(object): - def __init__(self, wrapper: BaseWrapper): +class ComplexCodec: + """Wrapper around :class:`BaseWrapper` that implements RLP encoding. + + Abstract layer to join serialization and encoding + (and reverse operations) together. + """ + + wrapper: AbstractSerializer[Any] + """:class:`BaseWrapper` or :class:`ScalarKind` to use for serialization.""" + + def __init__(self, wrapper: AbstractSerializer[Any]) -> None: self.wrapper = wrapper def encode(self, data: Any) -> bytes: - packed = pack(data, self.wrapper) - # pretty_print(packed, 0) # Uncomment for debugging. + """Serialize and RLP-encode given high-level data to bytes.""" + packed = self.wrapper.serialize(data) return rlp_encode(packed) - def decode(self, data: bytes): + def decode(self, data: bytes) -> Any: + """RLP-decode and deserialize given bytes into higher-level structure.""" to_be_unpacked = rlp_decode(data) - return unpack(to_be_unpacked, self.wrapper) + return self.wrapper.deserialize(to_be_unpacked) diff --git a/thor_devkit/transaction.py b/thor_devkit/transaction.py index 4a9045b..ba2d9b7 100644 --- a/thor_devkit/transaction.py +++ b/thor_devkit/transaction.py @@ -1,363 +1,591 @@ -''' -Transaction class defines VeChain's multi-clause transaction (tx). +"""Transaction class defines VeChain's multi-clause transaction (tx). -This module defines data structure of a tx, and the encoding/decoding of tx data. -''' -from voluptuous import Schema, Any, Optional, REMOVE_EXTRA -from typing import Union, List +This module defines data structure of a transaction, +and the encoding/decoding of transaction data. +""" +import sys +import warnings from copy import deepcopy -from .rlp import NumericKind, CompactFixedBlobKind, NoneableFixedBlobKind, BlobKind, BytesKind -from .rlp import DictWrapper, HomoListWrapper -from .rlp import ComplexCodec -from .cry import blake2b256 -from .cry import secp256k1 -from .cry import address - -# Kind Definitions -# Used for VeChain's "reserved features" kind. -FeaturesKind = NumericKind(4) - -# Unsigned/Signed RLP Wrapper. 
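# A sketch tying the RLP layer together: ``ComplexCodec`` (defined above in
# ``thor_devkit.rlp``) drives a ``DictWrapper`` exactly like the transaction
# wrappers below, just with a much smaller schema:
from thor_devkit.rlp import BlobKind, ComplexCodec, DictWrapper, NumericKind

codec = ComplexCodec(DictWrapper({"id": NumericKind(4), "data": BlobKind()}))
encoded = codec.encode({"id": 7, "data": "0x1234"})       # RLP-encoded bytes
assert codec.decode(encoded) == {"id": 7, "data": "0x1234"}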
-_params = [ - ("chainTag", NumericKind(1)), - ("blockRef", CompactFixedBlobKind(8)), - ("expiration", NumericKind(4)), - ("clauses", HomoListWrapper(codec=DictWrapper([ - ("to", NoneableFixedBlobKind(20)), - ("value", NumericKind(32)), - ("data", BlobKind()) - ]))), - ("gasPriceCoef", NumericKind(1)), - ("gas", NumericKind(8)), - ("dependsOn", NoneableFixedBlobKind(32)), - ("nonce", NumericKind(8)), - ("reserved", HomoListWrapper(codec=BytesKind())) +from typing import Any, Dict, List, Optional, Sequence, Union + +import voluptuous +from voluptuous import REMOVE_EXTRA, Schema + +from thor_devkit.cry import address, blake2b256, secp256k1 +from thor_devkit.deprecation import deprecated, deprecated_to_property +from thor_devkit.exceptions import BadTransaction +from thor_devkit.rlp import ( + AbstractSerializer, + BlobKind, + BytesKind, + CompactFixedBlobKind, + ComplexCodec, + DictWrapper, + HomoListWrapper, + NumericKind, + OptionalFixedBlobKind, +) +from thor_devkit.validation import address_type, hex_integer + +if sys.version_info < (3, 8): + from typing_extensions import Final, TypedDict +else: + from typing import Final, TypedDict +if sys.version_info < (3, 11): + from typing_extensions import NotRequired +else: + from typing import NotRequired + + +__all__ = [ + # Main + "Transaction", + # Types + "ClauseT", + "ReservedT", + "TransactionBodyT", + # Schemas + "CLAUSE", + "RESERVED", + "BODY", + # Other + "UnsignedTxWrapper", + "SignedTxWrapper", + "data_gas", + "intrinsic_gas", ] -# Unsigned Tx Wrapper -UnsignedTxWrapper = DictWrapper(_params) +FeaturesKind: Final = NumericKind(4) +"""Kind Definitions. Used for VeChain's "reserved features" kind.""" + +# Unsigned/signed RLP wrapper parameters. +_params: Final[Dict[str, AbstractSerializer[Any]]] = { + "chainTag": NumericKind(1), + "blockRef": CompactFixedBlobKind(8), + "expiration": NumericKind(4), + "clauses": HomoListWrapper( + DictWrapper( + { + "to": OptionalFixedBlobKind(20), + "value": NumericKind(32), + "data": BlobKind(), + } + ) + ), + "gasPriceCoef": NumericKind(1), + "gas": NumericKind(8), + "dependsOn": OptionalFixedBlobKind(32), + "nonce": NumericKind(8), + "reserved": HomoListWrapper(codec=BytesKind()), +} + +UnsignedTxWrapper: Final = DictWrapper(_params) +"""Unsigned transaction wrapper. + +:meta hide-value: +""" + +SignedTxWrapper: Final = DictWrapper({**_params, "signature": BytesKind()}) +"""Signed transaction wrapper. + +:meta hide-value: +""" + -# Signed Tx Wrapper -SignedTxWrapper = DictWrapper( _params + [("signature", BytesKind())] ) +class ClauseT(TypedDict): + """Type of transaction clause. -CLAUSE = Schema( + .. versionadded:: 2.0.0 + """ + + to: Optional[str] + """Transaction target contract, or ``None`` to create new one.""" + value: Union[str, int] + """Amount to be paid (integer or its hex representation with ``0x``).""" + data: str + """VET to pass to the call.""" + + +CLAUSE: Final = Schema( { - "to": Any(str, None), # Destination contract address, or set to None to create contract. - "value": Any(str, int), # VET to pass to the call. - "data": str + # Destination contract address, or set to None to create contract. + "to": voluptuous.Any(address_type(), None), + "value": voluptuous.Any(hex_integer(to_int=True), int), # VET to pass to call. + "data": hex_integer(allow_empty=True), }, required=True, - extra=REMOVE_EXTRA + extra=REMOVE_EXTRA, ) +""" +Validation :external:class:`~voluptuous.schema_builder.Schema` for transaction clause. + +:meta hide-value: + +.. 
versionchanged:: 2.0.0 + Added validation of ``to``, ``value`` and ``data`` as hex string. +""" + +class ReservedT(TypedDict, total=False): + """Type of ``reserved`` transaction field. -RESERVED = Schema( + .. versionadded:: 2.0.0 + """ + + features: int + """Integer (8 bit) with features bits set.""" + unused: Sequence[bytes] + """Unused reserved fields.""" + + +RESERVED: Final = Schema( { - Optional("features"): int, # int. - Optional("unused"): [bytes] + voluptuous.Optional("features"): int, # int. + voluptuous.Optional("unused"): [voluptuous.Any(bytes, bytearray)], # "unused" In TypeScript version is of type: Buffer[] # Buffer itself is "byte[]", # which is equivalent to "bytes"/"bytearray" in Python. # So Buffer[] is "[bytes]"/"[bytearray]" in Python. }, required=True, - extra=REMOVE_EXTRA + extra=REMOVE_EXTRA, ) - - -BODY = Schema( +""" +Validation :external:class:`~voluptuous.schema_builder.Schema` +for ``reserved`` transaction field. + +:meta hide-value: +""" + + +class TransactionBodyT(TypedDict): + """Type of transaction body. + + .. versionadded:: 2.0.0 + """ + + chainTag: int # noqa: N815 + """Last byte of genesis block ID""" + blockRef: str # noqa: N815 + """Block reference, ``0x...``-like hex string (8 bytes). + + First 4 bytes are block height, the rest is part of referred block ID. + """ + expiration: int + """Expiration (relative to blockRef, in blocks)""" + clauses: Sequence[ClauseT] + """Transaction clauses.""" + gasPriceCoef: int # noqa: N815 + """Coefficient to calculate gas price.""" + gas: Union[int, str] + """Maximum of gas to be consumed (int or its hex representation with ``0x``).""" + dependsOn: Optional[str] # noqa: N815 + """Address of transaction on which current transaction depends.""" + nonce: Union[int, str] + """Transaction nonce (int or its hex representation with ``0x``).""" + reserved: NotRequired[ReservedT] + """Reserved field.""" + + +BODY: Final = Schema( { "chainTag": int, - "blockRef": str, + "blockRef": hex_integer(16), "expiration": int, "clauses": [CLAUSE], "gasPriceCoef": int, - "gas": Any(str, int), - "dependsOn": Any(str, None), - "nonce": Any(str, int), - Optional("reserved"): RESERVED + "gas": voluptuous.Any(hex_integer(to_int=True), int), + "dependsOn": voluptuous.Any(address_type(), None), + "nonce": voluptuous.Any(hex_integer(to_int=True), int), + voluptuous.Optional("reserved"): RESERVED, }, required=True, - extra=REMOVE_EXTRA + extra=REMOVE_EXTRA, ) +""" +Validation :external:class:`~voluptuous.schema_builder.Schema` +for transaction body. + +:meta hide-value: + +.. versionchanged:: 2.0.0 + Added validation of ``gas``, ``dependsOn`` and ``nonce`` fields as hex string. +""" def data_gas(data: str) -> int: - ''' - Calculate the gas the data will consume. + """Calculate the gas the data will consume. Parameters ---------- data : str - '0x...' style hex string. - ''' + ``0x...`` style hex string. + + Returns + ------- + int + Estimated gas consumption. + """ Z_GAS = 4 NZ_GAS = 68 - sum_up = 0 - for x in range(2, len(data), 2): - if data[x] == '0' and data[x+1] == '0': - sum_up += Z_GAS - else: - sum_up += NZ_GAS + return sum( + Z_GAS if odd == even == "0" else NZ_GAS + for odd, even in zip(data[2::2], data[3::2]) + ) - # print('sum_up', sum_up) - return sum_up - -def intrinsic_gas(clauses: List) -> int: - ''' - Calculate roughly the gas from a list of clauses. +def intrinsic_gas(clauses: Sequence[ClauseT]) -> int: + """Calculate roughly the gas from a list of clauses. 
Parameters ---------- - clauses : List + clauses : Sequence[ClauseT] A list of clauses (in dict format). Returns ------- int - The sum of gas. - ''' + The amount of gas. + """ TX_GAS = 5000 CLAUSE_GAS = 16000 CLAUSE_CONTRACT_CREATION = 48000 - if len(clauses) == 0: + if not clauses: return TX_GAS + CLAUSE_GAS - sum_total = 0 - sum_total += TX_GAS - + sum_total = TX_GAS for clause in clauses: - clause_sum = 0 - if clause['to']: # contract create. - clause_sum += CLAUSE_GAS + if clause["to"]: # Existing contract. + sum_total += CLAUSE_GAS else: - clause_sum += CLAUSE_CONTRACT_CREATION - clause_sum += data_gas(clause['data']) - - sum_total += clause_sum + sum_total += CLAUSE_CONTRACT_CREATION + sum_total += data_gas(clause["data"]) return sum_total -def right_trim_empty_bytes(m_list: List[bytes]) -> List: - ''' Given a list of bytes, remove the b'' from the tail of the list.''' - right_most_none_empty = None - for i in range(len(m_list) - 1, -1, -1): - if len(m_list[i]) != 0: - right_most_none_empty = i - break +def right_trim_empty_bytes(m_list: Sequence[bytes]) -> List[bytes]: + """Given a list of bytes, remove the b'' from the tail of the list.""" + rightmost_none_empty = next( + (idx for idx, item in enumerate(reversed(m_list)) if item), None + ) - if right_most_none_empty is None: # not found the right most none-empty string item + if rightmost_none_empty is None: return [] - return_list = m_list[:right_most_none_empty+1] + return list(m_list[: len(m_list) - rightmost_none_empty]) - return return_list +class Transaction: + """Multi-clause transaction. -class Transaction(): - # The reserved feature of delegated (vip-191) is 1. - DELEGATED_MASK = 1 + .. autoclasssumm:: Transaction + """ - def __init__(self, body: dict): - ''' Construct a transaction from a given body. ''' - self.body = BODY(body) - self.signature = None - - def get_body(self, as_copy:bool = True): - ''' - Get a dict of the body represents the transaction. - If as_copy, return a newly created dict. - If not, return the body used in this Transaction object. + DELEGATED_MASK: Final = 1 + """Mask for delegation bit. - Parameters - ---------- - as_copy : bool, optional - Return a new dict clone of the body, by default True - ''' - if as_copy: - return deepcopy(self.body) - else: - return self.body + The reserved feature of delegated (vip-191) is 1. 
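# --- Illustrative sketch (not part of the patch): rough gas estimates from
# --- intrinsic_gas() above, using the constants it defines. Clause values are
# --- made up for the example.
from thor_devkit.transaction import intrinsic_gas

call_clause = {"to": "0x7567d83b7b8d80addcb281a71d54fc7b3364ffed",
               "value": 0, "data": "0x000000606060"}
create_clause = {"to": None, "value": 0, "data": "0x"}

# 5000 (tx) + 16000 (clause) + 3*4 + 3*68 (data) = 21216
assert intrinsic_gas([call_clause]) == 21216
# 5000 (tx) + 48000 (contract creation), no data
assert intrinsic_gas([create_clause]) == 53000
# An empty clause list still pays the base cost.
assert intrinsic_gas([]) == 5000 + 16000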
+ """ - def _encode_reserved(self) -> List: - r = self.body.get('reserved', None) - if not r: - reserved = {"features": None, "unused": None} - else: - reserved = self.body['reserved'] + _signature: Optional[bytes] = None - f = reserved.get('features') or 0 - l = reserved.get('unused') or [] - m_list = [FeaturesKind.serialize(f)] + l + def __init__(self, body: TransactionBodyT) -> None: + """Construct a transaction from a given body.""" + self._body: TransactionBodyT = BODY(body) - return_list = right_trim_empty_bytes(m_list) + @property + def body(self) -> TransactionBodyT: + """Get a dict of the body that represents the transaction.""" + return self._body - return return_list + def copy_body(self) -> TransactionBodyT: + """Get a deep copy of transaction body.""" + return deepcopy(self.body) - def get_signing_hash(self, delegate_for: str = None) -> bytes: - reserved_list = self._encode_reserved() - _temp = deepcopy(self.body) - _temp.update({ - "reserved": reserved_list - }) - buff = ComplexCodec(UnsignedTxWrapper).encode(_temp) + def _encode_reserved(self) -> List[bytes]: + reserved = self.body.get("reserved", {}) + f = reserved.get("features") or 0 + unused: List[bytes] = list(reserved.get("unused", [])) or [] + m_list = [FeaturesKind.serialize(f)] + unused + + return right_trim_empty_bytes(m_list) + + def get_signing_hash(self, delegate_for: Optional[str] = None) -> bytes: + """Get signing hash (with delegate address if given).""" + buff = self.encode(force_unsigned=True) h, _ = blake2b256([buff]) if delegate_for: if not address.is_address(delegate_for): - raise Exception("delegate_for should be an address type.") + raise ValueError("delegate_for should be an address type.") x, _ = blake2b256([h, bytes.fromhex(delegate_for[2:])]) return x return h - def get_intrinsic_gas(self) -> int: - ''' Get the rough gas this tx will consume''' - return intrinsic_gas(self.body['clauses']) + @property + def intrinsic_gas(self) -> int: + """Roughly estimate amount of gas this transaction will consume. - def get_signature(self) -> Union[None, bytes]: - ''' Get the signature of current transaction.''' - return self.signature + .. versionadded:: 2.0.0 + """ + return intrinsic_gas(self.body["clauses"]) - def set_signature(self, sig: bytes): - ''' Set the signature ''' - self.signature = sig + @property + def signature(self) -> Optional[bytes]: + """Signature of transaction. + + .. versionadded:: 2.0.0 + """ + return self._signature + + @signature.setter + def signature(self, sig: Optional[bytes]) -> None: + """Set signature of transaction. + + .. versionadded:: 2.0.0 + """ + self._signature = bytes(sig) if sig is not None else sig - def get_origin(self) -> Union[None, str]: - if not self._signature_valid(): + @property + def origin(self) -> Optional[str]: + """Transaction origin. + + .. versionadded:: 2.0.0 + """ + if not self._signature_is_valid(): return None + sig = self.signature + assert sig is not None + try: my_sign_hash = self.get_signing_hash() - pub_key = secp256k1.recover( - my_sign_hash, self.get_signature()[0:65]) - return '0x' + address.public_key_to_address(pub_key).hex() - except: + pub_key = secp256k1.recover(my_sign_hash, sig[:65]) + return "0x" + address.public_key_to_address(pub_key).hex() + except ValueError: return None - def get_delegator(self) -> Union[None, str]: - if not self.is_delegated(): + @property + def delegator(self) -> Optional[str]: + """Transaction delegator. + + .. 
versionadded:: 2.0.0 + """ + if not self.is_delegated: return None - if not self._signature_valid(): + if not self._signature_is_valid(): return None - origin = self.get_origin() + sig = self.signature + assert sig is not None + + origin = self.origin if not origin: return None try: my_sign_hash = self.get_signing_hash(origin) - pub_key = secp256k1.recover( - my_sign_hash, self.get_signature()[65:]) - return '0x' + address.public_key_to_address(pub_key).hex() - except: + pub_key = secp256k1.recover(my_sign_hash, sig[65:]) + return "0x" + address.public_key_to_address(pub_key).hex() + except ValueError: return None - def is_delegated(self): - ''' Check if this transaction is delegated.''' - if not self.body.get('reserved'): - return False + @property + def is_delegated(self) -> bool: + """Check if this transaction is delegated. - if not self.body.get('reserved').get('features'): - return False + .. versionchanged:: 2.0.0 + :attr:`is_delegated` is a property now. - return self.body['reserved']['features'] & self.DELEGATED_MASK == self.DELEGATED_MASK + """ + if not self.body.get("reserved", {}).get("features"): + return False - def _signature_valid(self) -> bool: - if self.is_delegated(): - expected_sig_len = 65 * 2 - else: - expected_sig_len = 65 + return ( + self.body["reserved"]["features"] & self.DELEGATED_MASK + == self.DELEGATED_MASK + ) - if not self.get_signature(): - return False - else: - return len(self.get_signature()) == expected_sig_len + @property + def id(self) -> Optional[str]: # noqa: A003 + """Transaction id. - def get_id(self) -> Union[None, str]: - if not self._signature_valid(): + .. versionadded:: 2.0.0 + """ + if not self._signature_is_valid(): return None + + sig = self.signature + assert sig is not None + try: my_sign_hash = self.get_signing_hash() - pub_key = secp256k1.recover( - my_sign_hash, self.get_signature()[0:65]) + pub_key = secp256k1.recover(my_sign_hash, sig[:65]) origin = address.public_key_to_address(pub_key) - return '0x' + blake2b256([my_sign_hash, origin])[0].hex() - except: + return "0x" + blake2b256([my_sign_hash, origin])[0].hex() + except ValueError: return None - def encode(self): - ''' Encode the tx into bytes ''' - reserved_list = self._encode_reserved() - temp = deepcopy(self.body) - temp.update({ - 'reserved': reserved_list - }) - - if self.signature: - temp.update({ - 'signature': self.signature - }) - return ComplexCodec(SignedTxWrapper).encode(temp) + def _signature_is_valid(self) -> bool: + if not self.signature: + return False else: + expected_sig_len = 65 * 2 if self.is_delegated else 65 + return len(self.signature) == expected_sig_len + + def encode(self, force_unsigned: bool = False) -> bytes: + """Encode the tx into bytes.""" + reserved_list = self._encode_reserved() + temp = dict(self.copy_body()) # cast to dict for mypy + temp["reserved"] = reserved_list + + if not self.signature or force_unsigned: return ComplexCodec(UnsignedTxWrapper).encode(temp) + else: + temp["signature"] = self.signature + return ComplexCodec(SignedTxWrapper).encode(temp) @staticmethod - def decode(raw: bytes, unsigned: bool): - ''' Return a Transaction type instance ''' - body = None + def decode(raw: bytes, unsigned: bool) -> "Transaction": + """Create a Transaction type instance from encoded bytes.""" sig = None if unsigned: body = ComplexCodec(UnsignedTxWrapper).decode(raw) else: - decoded = ComplexCodec(SignedTxWrapper).decode(raw) - sig = decoded['signature'] # bytes - del decoded['signature'] - body = decoded - - r = body.get('reserved', []) # list of 
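# --- VIP-191 sketch (editorial, illustrative values): setting the delegation
# --- bit via the "reserved" field and checking it with the new is_delegated
# --- property; the sender address below is an assumption for the example.
from thor_devkit.transaction import Transaction

delegated_tx = Transaction({
    "chainTag": 1,
    "blockRef": "0x00000000aabbccdd",
    "expiration": 32,
    "clauses": [{"to": "0x7567d83b7b8d80addcb281a71d54fc7b3364ffed",
                 "value": 0, "data": "0x"}],
    "gasPriceCoef": 0,
    "gas": 21000,
    "dependsOn": None,
    "nonce": 0,
    "reserved": {"features": 1},  # bit 0 == Transaction.DELEGATED_MASK
})

assert delegated_tx.is_delegated
# The gas payer signs a hash bound to the sender's address.
sender = "0x7567d83b7b8d80addcb281a71d54fc7b3364ffed"
payer_hash = delegated_tx.get_signing_hash(delegate_for=sender)
assert len(payer_hash) == 32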
bytes - if len(r) > 0: - if len(r[-1]) == 0: - raise Exception('invalid reserved fields: not trimmed.') - - features = FeaturesKind.deserialize(r[0]) - body['reserved'] = { - 'features': features - } + body = ComplexCodec(SignedTxWrapper).decode(raw) + sig = body.pop("signature") # bytes + + r = body.pop("reserved", []) # list of bytes + if r: + if not r[-1]: + raise BadTransaction("invalid reserved fields: not trimmed.") + + reserved = {"features": FeaturesKind.deserialize(r[0])} if len(r) > 1: - body['reserved']['unused'] = r[1:] - else: - del body['reserved'] + reserved["unused"] = r[1:] + body["reserved"] = RESERVED(reserved) # Now body is a "dict", we try to check if it is in good shape. # Check if clause is in good shape. - _clauses = [] - for each in body['clauses']: - _clauses.append( CLAUSE(each) ) - body['clauses'] = _clauses - - # Check if reserved is in good shape. - _reserved = None - if body.get('reserved'): - _reserved = RESERVED(body['reserved']) - body['reserved'] = _reserved + body["clauses"] = [CLAUSE(c) for c in body["clauses"]] tx = Transaction(body) if sig: - tx.set_signature(sig) + tx.signature = sig return tx - def __eq__(self, other): - ''' Compare two tx to be the same? ''' - flag_1 = (self.signature == other.signature) - flag_2 = self.encode() == other.encode() # only because of ["reserved"]["unused"] may glitch. - return flag_1 and flag_2 \ No newline at end of file + def __eq__(self, other: object) -> bool: + if not isinstance(other, Transaction): + return NotImplemented + + return ( + self.signature == other.signature + # only because of ["reserved"]["unused"] may glitch. + and self.encode() == other.encode() + ) + + @deprecated_to_property + def get_delegator(self) -> Optional[str]: + """Get delegator. + + .. customtox-exclude:: + + .. deprecated:: 2.0.0 + Use :attr:`delegator` property instead. + """ + return self.delegator + + @deprecated_to_property + def get_intrinsic_gas(self) -> int: + """Get intrinsic gas estimate. + + .. customtox-exclude:: + + .. deprecated:: 2.0.0 + Use :attr:`intrinsic_gas` property instead. + """ + return self.intrinsic_gas + + @deprecated_to_property + def get_signature(self) -> Optional[bytes]: + """Get signature. + + .. customtox-exclude:: + + .. deprecated:: 2.0.0 + Use :attr:`signature` property instead. + """ + return self.signature + + @deprecated_to_property + def set_signature(self, sig: bytes) -> None: + """Set signature. + + .. customtox-exclude:: + + .. deprecated:: 2.0.0 + Use :attr:`signature` property setter instead. + """ + self.signature = sig + + @deprecated_to_property + def get_origin(self) -> Optional[str]: + """Get origin. + + .. customtox-exclude:: + + .. deprecated:: 2.0.0 + Use :attr:`origin` property instead. + """ + return self.origin + + @deprecated_to_property + def get_id(self) -> Optional[str]: + """Get transaction ID. + + .. customtox-exclude:: + + .. deprecated:: 2.0.0 + Use :attr:`.id` property instead. + """ + return self.id + + @deprecated + def get_body(self, as_copy: bool = True) -> TransactionBodyT: + """Get a dict of the body that represents the transaction. + + .. deprecated:: 2.0.0 + Use :meth:`body` or :meth:`copy_body` instead. + + .. customtox-exclude:: + + Parameters + ---------- + as_copy : bool, default: True + Return a new dict clone of the body + + Returns + ------- + TransactionBodyT + If as_copy, return a newly created dict. + If not, return the body of this Transaction object. + """ + warnings.warn( + DeprecationWarning( + "Method 'get_body' is deprecated." 
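# --- End-to-end sketch with the new property-based API (editorial): sign,
# --- inspect, encode, decode. The private key is an illustrative throwaway
# --- value, and secp256k1.sign from thor_devkit.cry is assumed to be the
# --- signing helper (only recover() appears in this patch).
from thor_devkit.cry import secp256k1
from thor_devkit.transaction import Transaction

tx = Transaction({
    "chainTag": 1,
    "blockRef": "0x00000000aabbccdd",
    "expiration": 32,
    "clauses": [{"to": "0x7567d83b7b8d80addcb281a71d54fc7b3364ffed",
                 "value": 10000, "data": "0x000000606060"}],
    "gasPriceCoef": 128,
    "gas": 21000,
    "dependsOn": None,
    "nonce": 12345678,
})

priv_key = bytes.fromhex(
    "7582be841ca040aa940fff6c05773129e135623e41acce3e0b8ba520dc1ae26a"
)
tx.signature = secp256k1.sign(tx.get_signing_hash(), priv_key)

assert tx.origin is not None  # recovered from the 65-byte signature
assert tx.id is not None      # blake2b256(signing hash + origin)

raw = tx.encode()             # signed RLP payload
restored = Transaction.decode(raw, unsigned=False)
assert restored == tx         # __eq__ compares signature and encoding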
+ " Use 'body' property or 'copy_body' method instead." + ) + ) + if as_copy: + return self.copy_body() + else: + return self.body diff --git a/thor_devkit/validation.py b/thor_devkit/validation.py new file mode 100644 index 0000000..79e9d88 --- /dev/null +++ b/thor_devkit/validation.py @@ -0,0 +1,217 @@ +"""Helper functions for :mod:`voluptuous` validation.""" +import sys +import warnings +from typing import Callable, Optional, Union, overload + +from voluptuous.error import Invalid + +if sys.version_info < (3, 8): + from typing_extensions import Literal +else: + from typing import Literal + +__all__ = ["hex_integer", "hex_string", "address_type"] + + +@overload +def hex_integer( + length: Optional[int] = ..., + *, + to_int: Literal[False] = ..., + require_prefix: bool = ..., + allow_empty: bool = False, +) -> Callable[[str], str]: + ... + + +@overload +def hex_integer( + length: Optional[int] = None, + *, + to_int: Literal[True], + require_prefix: bool = True, + allow_empty: bool = False, +) -> Callable[[str], int]: + ... + + +def hex_integer( + length: Optional[int] = None, + *, + to_int: bool = False, + require_prefix: bool = True, + allow_empty: bool = False, +) -> Union[Callable[[str], str], Callable[[str], int]]: + """Validate and normalize hex representation of number. + + Normalized form: ``0x{val}``, ``val`` is in lower case. + + Parameters + ---------- + length: Optional[int] + Expected length of string, excluding prefix. + to_int: bool, default: False + Normalize given string to integer. + require_prefix: bool, default: True + Require ``0x`` prefix. + allow_empty: bool, default: False + Allow empty string (or ``0x`` if ``require_prefix=True``) + + Returns + ------- + Callable[[str], str] + Validator callable. + """ + assert not length or length >= 0, "Negative lengths not allowed." + + if length == 0 and not allow_empty: + allow_empty = True + warnings.warn( + RuntimeWarning( + "String with length=0 cannot be non-empty," + " pass allow_empty=True explicitly." + ) + ) + + def validate(value: str) -> Union[int, str]: + if not isinstance(value, str): + raise Invalid(f"Expected string, got: {type(value)}") + + value = value.lower() + if not value.startswith("0x"): + if require_prefix: + raise Invalid('Expected hex string, that must start with "0x"') + else: + value = value[2:] + + real_length = len(value) + if length is not None and real_length != length: + raise Invalid( + f"Expected hex representation of length {length}, got {real_length}" + ) + + try: + int_value = int(value, 16) + except ValueError as e: + if allow_empty and value in {"", "0x"}: + int_value = 0 + else: + raise Invalid( + "Expected hex string, that is convertible to number" + ) from e + + if to_int: + return int_value + + return "0x" + value + + # We can define two functions in branches of ``to_int`` flag, but it will be + # longer and less readable. Just ignore: we are sure that return is + # either int or str depending on the flag. + return validate # type: ignore + + +@overload +def hex_string( + length: Optional[int] = ..., + *, + to_bytes: Literal[False] = ..., + allow_empty: bool = False, + allow_prefix: bool = ..., +) -> Callable[[str], str]: + ... + + +@overload +def hex_string( + length: Optional[int] = None, + *, + to_bytes: Literal[True], + allow_empty: bool = False, + allow_prefix: bool = True, +) -> Callable[[str], bytes]: + ... 
+ + +def hex_string( + length: Optional[int] = None, + *, + to_bytes: bool = False, + allow_empty: bool = False, + allow_prefix: bool = False, +) -> Union[Callable[[str], str], Callable[[str], bytes]]: + """Validate and normalize hex representation of bytes (like :meth:`bytes.hex`). + + Normalized form: without ``0x`` prefix, in lower case. + + Parameters + ---------- + length: Optional[int] + Expected length of string. + allow_empty: bool, default: False + Allow empty string. + allow_prefix: bool, default: True + Allow ``0x`` prefix in input. + + Returns + ------- + Callable[[str], str] + Validator callable. + """ + assert not length or length >= 0, "Negative lengths not allowed." + + if length == 0 and not allow_empty: + allow_empty = True + warnings.warn( + RuntimeWarning( + "String with length=0 cannot be non-empty," + " pass allow_empty=True explicitly." + ) + ) + + def validate(value: str) -> Union[bytes, str]: + if not isinstance(value, str): + raise Invalid(f"Expected string, got: {type(value)}") + + value = value.lower() + if len(value) % 2: + raise Invalid("Expected hex representation of even length") + + if value.startswith("0x"): + if not allow_prefix: + raise Invalid("Expected hex string without '0x' prefix.") + value = value[2:] + + bytes_count = len(value) + if length is not None and bytes_count != length: + raise Invalid( + f"Expected hex representation of length {length}, got {bytes_count}" + ) + + try: + binary = bytes.fromhex(value) + except ValueError as e: + raise Invalid("Expected hex string, that is convertible to bytes") from e + + return binary if to_bytes else value + + # We can define two functions in branches od ``to_bytes`` flag, but it will be + # longer and less readable. Just ignore: we are sure that return is + # either int or str depending on the flag. + return validate # type: ignore + + +def address_type() -> Callable[[str], str]: + """Validate and normalize address (40 bytes, with or without prefix). + + Returns + ------- + Callable[[str], str] + Validator callable. + """ + + def validate(value: str) -> str: + base_validator = hex_integer(40, require_prefix=False) + return base_validator(value) + + return validate
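# --- Companion sketch for hex_string() and address_type() (editorial,
# --- illustrative values).
from thor_devkit.validation import address_type, hex_string

# hex_string normalizes to an unprefixed, lower-case hex string...
assert hex_string()("DEADBEEF") == "deadbeef"
# ...or to raw bytes when to_bytes=True.
assert hex_string(to_bytes=True)("deadbeef") == b"\xde\xad\xbe\xef"

# address_type expects 40 hex digits, with or without the "0x" prefix,
# and always returns the prefixed, lower-case form.
addr = "7567D83b7b8d80ADdCb281A71d54Fc7B3364ffed"
assert address_type()(addr) == "0x" + addr.lower()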