diff --git a/hathor/consensus/consensus.py b/hathor/consensus/consensus.py index 44c76d141..7e8d0c32a 100644 --- a/hathor/consensus/consensus.py +++ b/hathor/consensus/consensus.py @@ -450,6 +450,9 @@ def _feature_activation_rules(self, tx: Transaction, new_best_block: Block) -> b case Feature.FEE_TOKENS: if not self._fee_tokens_activation_rule(tx, is_active): return False + case Feature.TRANSFER_HEADER: + if not self._transfer_headers_activation_rule(tx, is_active): + return False case Feature.COUNT_CHECKDATASIG_OP: if not self._checkdatasig_count_rule(tx): return False @@ -522,6 +525,18 @@ def _checkdatasig_count_rule(self, tx: Transaction) -> bool: return False return True + def _transfer_headers_activation_rule(self, tx: Transaction, is_active: bool) -> bool: + """ + Check whether a tx became invalid because the reorg changed the transfer-headers feature activation state. + """ + if is_active: + return True + + if tx.has_transfer_header(): + return False + + return True + def _opcodes_v2_activation_rule(self, tx: Transaction, new_best_block: Block) -> bool: """Check whether a tx became invalid because of the opcodes V2 feature.""" from hathor.verification.nano_header_verifier import NanoHeaderVerifier diff --git a/hathor/dag_builder/builder.py b/hathor/dag_builder/builder.py index a2b5b8910..3fe2b31b3 100644 --- a/hathor/dag_builder/builder.py +++ b/hathor/dag_builder/builder.py @@ -47,6 +47,8 @@ NC_WITHDRAWAL_KEY = 'nc_withdrawal' TOKEN_VERSION_KEY = 'token_version' FEE_KEY = 'fee' +NC_TRANSFER_INPUT_KEY = 'nc_transfer_input' +NC_TRANSFER_OUTPUT_KEY = 'nc_transfer_output' class DAGBuilder: @@ -240,6 +242,22 @@ def _add_nc_attribute(self, name: str, key: str, value: str) -> None: actions.append((token, amount)) node.attrs[key] = actions + elif key == NC_TRANSFER_INPUT_KEY: + transfer_inputs = node.get_attr_list(key, default=[]) + token, amount, (wallet,) = parse_amount_token(value) + if amount < 0: + raise SyntaxError(f'unexpected negative amount in `{key}`') + 
transfer_inputs.append((wallet, token, amount)) + node.attrs[key] = transfer_inputs + + elif key == NC_TRANSFER_OUTPUT_KEY: + transfer_outputs = node.get_attr_list(key, default=[]) + token, amount, (wallet,) = parse_amount_token(value) + if amount < 0: + raise SyntaxError(f'unexpected negative amount in `{key}`') + transfer_outputs.append((wallet, token, amount)) + node.attrs[key] = transfer_outputs + else: node.attrs[key] = value diff --git a/hathor/dag_builder/vertex_exporter.py b/hathor/dag_builder/vertex_exporter.py index fb26f93fd..739c825a5 100644 --- a/hathor/dag_builder/vertex_exporter.py +++ b/hathor/dag_builder/vertex_exporter.py @@ -13,6 +13,7 @@ # limitations under the License. import ast +import hashlib import re from collections import defaultdict from types import ModuleType @@ -23,7 +24,15 @@ from hathor.conf.settings import HathorSettings from hathor.crypto.util import decode_address, get_address_from_public_key_bytes from hathor.daa import DifficultyAdjustmentAlgorithm -from hathor.dag_builder.builder import FEE_KEY, NC_DEPOSIT_KEY, NC_WITHDRAWAL_KEY, DAGBuilder, DAGNode +from hathor.dag_builder.builder import ( + FEE_KEY, + NC_DEPOSIT_KEY, + NC_TRANSFER_INPUT_KEY, + NC_TRANSFER_OUTPUT_KEY, + NC_WITHDRAWAL_KEY, + DAGBuilder, + DAGNode, +) from hathor.dag_builder.types import DAGNodeType, VertexResolverType, WalletFactoryType from hathor.dag_builder.utils import get_literal, is_literal from hathor.nanocontracts import Blueprint, OnChainBlueprint @@ -42,6 +51,7 @@ from hathor.transaction.base_transaction import TxInput, TxOutput from hathor.transaction.headers.fee_header import FeeHeader, FeeHeaderEntry from hathor.transaction.headers.nano_header import ADDRESS_LEN_BYTES +from hathor.transaction.headers.transfer_header import TxTransferInput, TxTransferOutput from hathor.transaction.scripts.p2pkh import P2PKH from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.wallet import BaseWallet, HDWallet, KeyPair @@ -330,6 
+340,69 @@ def add_headers_if_needed(self, node: DAGNode, vertex: BaseTransaction) -> None: """Add the configured headers.""" self.add_nano_header_if_needed(node, vertex) self.add_fee_header_if_needed(node, vertex) + self.add_transfer_header_if_needed(node, vertex) + + def _get_token_index(self, token_name: str, vertex: Transaction) -> int: + token_index = 0 + if token_name != 'HTR': + token_creation_tx = self._vertices[token_name] + if token_creation_tx.hash not in vertex.tokens: + vertex.tokens.append(token_creation_tx.hash) + token_index = 1 + vertex.tokens.index(token_creation_tx.hash) + return token_index + + def add_transfer_header_if_needed(self, node: DAGNode, vertex: BaseTransaction) -> None: + inputs = node.get_attr_list(NC_TRANSFER_INPUT_KEY, default=[]) + outputs = node.get_attr_list(NC_TRANSFER_OUTPUT_KEY, default=[]) + + if not inputs and not outputs: + return + + if not isinstance(vertex, Transaction): + raise TypeError('TransferHeader is only supported for transactions') + + transfer_inputs: list[TxTransferInput] = [] + for wallet_name, token_name, amount in inputs: + wallet = self.get_wallet(wallet_name) + assert isinstance(wallet, HDWallet) + privkey = wallet.get_key_at_index(0) + pubkey_bytes = privkey.sec() + address = get_address_from_public_key_bytes(pubkey_bytes) + token_index = self._get_token_index(token_name, vertex) + + sighash_data = vertex.get_sighash_all_data() + sighash_data_hash = hashlib.sha256(sighash_data).digest() + signature = privkey.sign(sighash_data_hash) + script = P2PKH.create_input_data(public_key_bytes=pubkey_bytes, signature=signature) + + transfer_inputs.append(TxTransferInput( + address=address, + amount=amount, + token_index=token_index, + script=script, + )) + + transfer_outputs: list[TxTransferOutput] = [] + for wallet_name, token_name, amount in outputs: + wallet = self.get_wallet(wallet_name) + assert isinstance(wallet, HDWallet) + privkey = wallet.get_key_at_index(0) + pubkey_bytes = privkey.sec() + address = 
get_address_from_public_key_bytes(pubkey_bytes) + token_index = self._get_token_index(token_name, vertex) + transfer_outputs.append(TxTransferOutput( + address=address, + amount=amount, + token_index=token_index, + )) + + from hathor.transaction.headers import TransferHeader + transfer_header = TransferHeader( + tx=vertex, + inputs=transfer_inputs, + outputs=transfer_outputs, + ) + vertex.headers.append(transfer_header) def add_nano_header_if_needed(self, node: DAGNode, vertex: BaseTransaction) -> None: if 'nc_id' not in node.attrs: diff --git a/hathor/feature_activation/feature.py b/hathor/feature_activation/feature.py index 480ef5685..0448f5587 100644 --- a/hathor/feature_activation/feature.py +++ b/hathor/feature_activation/feature.py @@ -32,4 +32,5 @@ class Feature(StrEnum): COUNT_CHECKDATASIG_OP = 'COUNT_CHECKDATASIG_OP' NANO_CONTRACTS = 'NANO_CONTRACTS' FEE_TOKENS = 'FEE_TOKENS' + TRANSFER_HEADER = 'TRANSFER_HEADER' OPCODES_V2 = 'OPCODES_V2' diff --git a/hathor/feature_activation/utils.py b/hathor/feature_activation/utils.py index 47a01235b..5ebdc39fb 100644 --- a/hathor/feature_activation/utils.py +++ b/hathor/feature_activation/utils.py @@ -36,16 +36,18 @@ class Features: nanocontracts: bool fee_tokens: bool opcodes_version: OpcodesVersion + transfer_headers: bool = False @staticmethod def from_vertex(*, settings: HathorSettings, feature_service: FeatureService, vertex: Vertex) -> Features: - """Return whether the Nano Contracts feature is active according to the provided settings and vertex.""" + """Return active/inactive state for every runtime feature according to the provided settings and vertex.""" from hathorlib.conf.settings import FeatureSetting feature_states = feature_service.get_feature_states(vertex=vertex) feature_settings = { Feature.COUNT_CHECKDATASIG_OP: FeatureSetting.FEATURE_ACTIVATION, Feature.NANO_CONTRACTS: settings.ENABLE_NANO_CONTRACTS, Feature.FEE_TOKENS: settings.ENABLE_FEE_BASED_TOKENS, + Feature.TRANSFER_HEADER: 
settings.ENABLE_TRANSFER_HEADER, Feature.OPCODES_V2: settings.ENABLE_OPCODES_V2, } @@ -61,6 +63,7 @@ def from_vertex(*, settings: HathorSettings, feature_service: FeatureService, ve nanocontracts=feature_is_active[Feature.NANO_CONTRACTS], fee_tokens=feature_is_active[Feature.FEE_TOKENS], opcodes_version=opcodes_version, + transfer_headers=feature_is_active[Feature.TRANSFER_HEADER], ) diff --git a/hathor/nanocontracts/blueprint_env.py b/hathor/nanocontracts/blueprint_env.py index bc220fd97..2eba29510 100644 --- a/hathor/nanocontracts/blueprint_env.py +++ b/hathor/nanocontracts/blueprint_env.py @@ -27,6 +27,7 @@ from hathor.nanocontracts.rng import NanoRNG from hathor.nanocontracts.runner import Runner from hathor.nanocontracts.storage import NCContractStorage + from hathor.nanocontracts.types import Address NCAttrCache: TypeAlias = dict[bytes, Any] | None @@ -266,3 +267,7 @@ def setup_new_contract( actions=actions, fees=fees or (), ) + + def transfer_to_address(self, address: Address, amount: Amount, token: TokenUid) -> None: + """Transfer a given amount of token to an address balance.""" + self.__runner.syscall_transfer_to_address(address, amount, token) diff --git a/hathor/nanocontracts/execution/block_executor.py b/hathor/nanocontracts/execution/block_executor.py index 47355e499..ea12e7882 100644 --- a/hathor/nanocontracts/execution/block_executor.py +++ b/hathor/nanocontracts/execution/block_executor.py @@ -18,10 +18,11 @@ import hashlib import traceback +from collections import defaultdict from dataclasses import dataclass from typing import TYPE_CHECKING, Iterator -from hathor.nanocontracts.exception import NCFail +from hathor.nanocontracts.exception import NCFail, NCInsufficientFunds from hathor.transaction import Block, Transaction from hathor.transaction.exceptions import TokenNotFound from hathor.transaction.nc_execution_state import NCExecutionState @@ -32,6 +33,7 @@ from hathor.nanocontracts.runner.runner import RunnerFactory from 
hathor.nanocontracts.sorter.types import NCSorterCallable from hathor.nanocontracts.storage import NCBlockStorage, NCStorageFactory + from hathor.nanocontracts.types import Address, TokenUid # Transaction execution result types (also used as block execution effects) @@ -233,19 +235,23 @@ def execute_transaction( block_storage.set_address_seqnum(nc_address, nc_header.nc_seqnum) return NCTxExecutionSkipped(tx=tx) + transfer_header_diffs = self._get_transfer_header_diffs(tx) runner = self._runner_factory.create( block_storage=block_storage, seed=rng_seed, ) try: + self._verify_transfer_header_balances(block_storage, transfer_header_diffs) runner.execute_from_tx(tx) # after the execution we have the latest state in the storage # and at this point no tokens pending creation self._verify_sum_after_execution(tx, block_storage) + self._apply_transfer_header_diffs(block_storage, transfer_header_diffs) except NCFail as e: + self._ensure_runner_has_last_call_info(tx, runner) return NCTxExecutionFailure( tx=tx, runner=runner, @@ -255,6 +261,65 @@ def execute_transaction( return NCTxExecutionSuccess(tx=tx, runner=runner) + def _get_transfer_header_diffs(self, tx: Transaction) -> dict[tuple['Address', 'TokenUid'], int]: + from hathor.nanocontracts.types import Address, TokenUid + + diffs: defaultdict[tuple[Address, TokenUid], int] = defaultdict(int) + if not tx.has_transfer_header(): + return dict(diffs) + + transfer_header = tx.get_transfer_header() + for txin in transfer_header.inputs: + token_uid = TokenUid(tx.get_token_uid(txin.token_index)) + diffs[(Address(txin.address), token_uid)] -= txin.amount + + for txout in transfer_header.outputs: + token_uid = TokenUid(tx.get_token_uid(txout.token_index)) + diffs[(Address(txout.address), token_uid)] += txout.amount + + return dict(diffs) + + def _verify_transfer_header_balances( + self, + block_storage: 'NCBlockStorage', + transfer_header_diffs: dict[tuple['Address', 'TokenUid'], int], + ) -> None: + for (address, token_uid), diff 
in transfer_header_diffs.items(): + if diff >= 0: + continue + + balance = block_storage.get_address_balance(address, token_uid) + if balance + diff < 0: + raise NCInsufficientFunds( + f'insufficient transfer-header balance for address={address.hex()} ' + f'token={token_uid.hex()}: available={balance} required={-diff}' + ) + + def _apply_transfer_header_diffs( + self, + block_storage: 'NCBlockStorage', + transfer_header_diffs: dict[tuple['Address', 'TokenUid'], int], + ) -> None: + from hathor.nanocontracts.types import Amount + + for (address, token_uid), diff in transfer_header_diffs.items(): + if diff == 0: + continue + block_storage.add_address_balance(address, Amount(diff), token_uid) + + def _ensure_runner_has_last_call_info(self, tx: Transaction, runner: 'Runner') -> None: + from hathor.nanocontracts.types import ContractId, VertexId + + if runner._last_call_info is not None: + return + + nano_header = tx.get_nano_header() + if nano_header.is_creating_a_new_contract(): + contract_id = ContractId(VertexId(tx.hash)) + else: + contract_id = ContractId(VertexId(nano_header.nc_id)) + runner._last_call_info = runner._build_call_info(contract_id) + def _verify_sum_after_execution(self, tx: Transaction, block_storage: 'NCBlockStorage') -> None: """Verify token sums after execution for dynamically created tokens.""" from hathor.verification.transaction_verifier import TransactionVerifier diff --git a/hathor/nanocontracts/runner/runner.py b/hathor/nanocontracts/runner/runner.py index 591a3c438..4e7e1c97c 100644 --- a/hathor/nanocontracts/runner/runner.py +++ b/hathor/nanocontracts/runner/runner.py @@ -65,6 +65,7 @@ NC_FALLBACK_METHOD, NC_INITIALIZE_METHOD, Address, + Amount, BaseTokenAction, BlueprintId, ContractId, @@ -91,6 +92,7 @@ from hathor.reactor import ReactorProtocol from hathor.transaction import Transaction from hathor.transaction.exceptions import InvalidFeeAmount +from hathor.transaction.headers.nano_header import ADDRESS_LEN_BYTES from 
hathor.transaction.storage import TransactionStorage from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.token_info import TokenDescription, TokenVersion @@ -585,6 +587,7 @@ def _validate_balances(self, ctx: Context) -> None: def _commit_all_changes_to_storage(self) -> None: """Commit all change trackers.""" assert self._call_info is not None + for nc_id, change_trackers in self._call_info.change_trackers.items(): assert len(change_trackers) == 1 change_tracker = change_trackers[0] @@ -1434,6 +1437,24 @@ def forbid_call_on_view(self, name: str) -> None: if current_call_record.type == CallType.VIEW: raise NCViewMethodError(f'@view method cannot call `syscall.{name}`') + @_forbid_syscall_from_view('transfer_to_address') + def syscall_transfer_to_address(self, address: Address, amount: Amount, token: TokenUid) -> None: + if amount < 0: + raise NCInvalidSyscall('amount cannot be negative') + + if amount == 0: + # XXX Should we fail? + return + + if not isinstance(address, Address) or len(address) != ADDRESS_LEN_BYTES: + raise NCInvalidSyscall(f'only addresses with {ADDRESS_LEN_BYTES} bytes are allowed') + + # XXX: this makes sure the token exists + self._get_token(token) + + changes_tracker = self.get_current_changes_tracker() + changes_tracker.add_address_balance(address, amount, token) + class RunnerFactory: __slots__ = ('reactor', 'settings', 'tx_storage', 'nc_storage_factory') diff --git a/hathor/nanocontracts/storage/block_storage.py b/hathor/nanocontracts/storage/block_storage.py index 36ecf0621..9f45456d0 100644 --- a/hathor/nanocontracts/storage/block_storage.py +++ b/hathor/nanocontracts/storage/block_storage.py @@ -22,9 +22,9 @@ from hathor.nanocontracts.nc_types.token_version_nc_type import TokenVersionNCType from hathor.nanocontracts.storage.contract_storage import NCContractStorage from hathor.nanocontracts.storage.patricia_trie import NodeId, PatriciaTrie -from hathor.nanocontracts.storage.token_proxy import 
TokenProxy -from hathor.nanocontracts.types import Address, ContractId, TokenUid -from hathor.transaction.headers.nano_header import ADDRESS_SEQNUM_SIZE +from hathor.nanocontracts.storage.restricted_block_proxy import RestrictedBlockProxy +from hathor.nanocontracts.types import Address, Amount, ContractId, TokenUid +from hathor.transaction.headers.nano_header import ADDRESS_LEN_BYTES, ADDRESS_SEQNUM_SIZE from hathor.transaction.token_info import TokenVersion from hathor.utils import leb128 @@ -32,7 +32,8 @@ class _Tag(Enum): CONTRACT = b'\0' TOKEN = b'\1' - ADDRESS = b'\2' + ADDRESS_SEQNUM = b'\2' + ADDRESS_BALANCE = b'\3' class ContractKey(NamedTuple): @@ -49,11 +50,19 @@ def __bytes__(self): return _Tag.TOKEN.value + self.token_id -class AddressKey(NamedTuple): +class AddressSeqnumKey(NamedTuple): address: Address def __bytes__(self): - return _Tag.ADDRESS.value + self.address + return _Tag.ADDRESS_SEQNUM.value + self.address + + +class AddressBalanceKey(NamedTuple): + address: Address + token_id: TokenUid + + def __bytes__(self): + return _Tag.ADDRESS_BALANCE.value + self.address + self.token_id class NCBlockStorage: @@ -115,14 +124,14 @@ def get_contract_storage(self, contract_id: ContractId) -> NCContractStorage: trie = self._get_trie(nc_root_id) except KeyError: raise NanoContractDoesNotExist(contract_id.hex()) - token_proxy = TokenProxy(self) - return NCContractStorage(trie=trie, nc_id=contract_id, token_proxy=token_proxy) + block_proxy = RestrictedBlockProxy(self) + return NCContractStorage(trie=trie, nc_id=contract_id, block_proxy=block_proxy) def get_empty_contract_storage(self, contract_id: ContractId) -> NCContractStorage: """Create a new contract storage instance for a given contract.""" trie = self._get_trie(None) - token_proxy = TokenProxy(self) - return NCContractStorage(trie=trie, nc_id=contract_id, token_proxy=token_proxy) + block_proxy = RestrictedBlockProxy(self) + return NCContractStorage(trie=trie, nc_id=contract_id, block_proxy=block_proxy) 
def get_token_description(self, token_id: TokenUid) -> TokenDescription: """Return the token description for a given token_id.""" @@ -161,11 +170,32 @@ def create_token( token_description_bytes = self._TOKEN_DESCRIPTION_NC_TYPE.to_bytes(token_description) self._block_trie.update(bytes(key), token_description_bytes) + def get_address_balance(self, address: Address, token_id: TokenUid) -> Amount: + key = AddressBalanceKey(address, token_id) + try: + balance_bytes = self._block_trie.get(bytes(key)) + except KeyError: + return Amount(0) + else: + balance, buf = leb128.decode_unsigned(balance_bytes) + assert len(buf) == 0 + return Amount(balance) + + def add_address_balance(self, address: Address, amount: Amount, token_id: TokenUid) -> None: + if not isinstance(address, Address) or len(address) != ADDRESS_LEN_BYTES: + raise ValueError(f'address must be Address with {ADDRESS_LEN_BYTES} bytes') + + key = AddressBalanceKey(address, token_id) + balance = Amount(self.get_address_balance(address, token_id) + amount) + assert balance >= 0 + balance_bytes = leb128.encode_unsigned(balance) + self._block_trie.update(bytes(key), balance_bytes) + def get_address_seqnum(self, address: Address) -> int: """Get the latest seqnum for an address. 
For clarity, new transactions must have a GREATER seqnum to be able to be executed.""" - key = AddressKey(address) + key = AddressSeqnumKey(address) try: seqnum_bytes = self._block_trie.get(bytes(key)) except KeyError: @@ -180,6 +210,6 @@ def set_address_seqnum(self, address: Address, seqnum: int) -> None: assert seqnum >= 0 old_seqnum = self.get_address_seqnum(address) assert seqnum > old_seqnum - key = AddressKey(address) + key = AddressSeqnumKey(address) seqnum_bytes = leb128.encode_unsigned(seqnum, max_bytes=ADDRESS_SEQNUM_SIZE) self._block_trie.update(bytes(key), seqnum_bytes) diff --git a/hathor/nanocontracts/storage/changes_tracker.py b/hathor/nanocontracts/storage/changes_tracker.py index 024c650f1..598491cbd 100644 --- a/hathor/nanocontracts/storage/changes_tracker.py +++ b/hathor/nanocontracts/storage/changes_tracker.py @@ -13,6 +13,7 @@ # limitations under the License. import itertools +from collections import defaultdict from dataclasses import dataclass from enum import Enum from types import MappingProxyType @@ -31,7 +32,7 @@ NCContractStorage, ) from hathor.nanocontracts.storage.types import _NOT_PROVIDED, DeletedKey, DeletedKeyType -from hathor.nanocontracts.types import BlueprintId, ContractId, TokenUid +from hathor.nanocontracts.types import Address, Amount, BlueprintId, ContractId, TokenUid from hathor.transaction.token_info import TokenDescription, TokenVersion T = TypeVar('T') @@ -81,6 +82,7 @@ def __init__(self, nc_id: ContractId, storage: NCContractStorage): self._balance_diff: dict[BalanceKey, int] = {} self._authorities_diff: dict[BalanceKey, _NCAuthorityDiff] = {} self._created_tokens: dict[TokenUid, TokenDescription] = {} + self._transfers: defaultdict[tuple[Address, TokenUid], int] = defaultdict(int) self._blueprint_id: BlueprintId | None = None self.has_been_commited = False @@ -117,6 +119,15 @@ def get_token(self, token_id: TokenUid) -> TokenDescription: return token_description return self.storage.get_token(token_id) + def 
add_address_balance( + self, + address: Address, + amount: Amount, + token_id: TokenUid, + ) -> None: + assert amount >= 0 + self._transfers[(address, token_id)] += amount + def get_balance_diff(self) -> MappingProxyType[BalanceKey, int]: """Return the balance diff of this change tracker.""" return MappingProxyType(self._balance_diff) @@ -208,6 +219,9 @@ def commit(self) -> None: token_version=TokenVersion(td.token_version) ) + for (address, token_id), amount in self._transfers.items(): + self.storage.add_address_balance(address, Amount(amount), token_id) + if self._blueprint_id is not None: self.storage.set_blueprint_id(self._blueprint_id) diff --git a/hathor/nanocontracts/storage/contract_storage.py b/hathor/nanocontracts/storage/contract_storage.py index 7bc169fad..3f710ff46 100644 --- a/hathor/nanocontracts/storage/contract_storage.py +++ b/hathor/nanocontracts/storage/contract_storage.py @@ -25,9 +25,9 @@ from hathor.nanocontracts.nc_types.dataclass_nc_type import make_dataclass_nc_type from hathor.nanocontracts.storage.maybedeleted_nc_type import MaybeDeletedNCType from hathor.nanocontracts.storage.patricia_trie import PatriciaTrie -from hathor.nanocontracts.storage.token_proxy import TokenProxy +from hathor.nanocontracts.storage.restricted_block_proxy import RestrictedBlockProxy from hathor.nanocontracts.storage.types import _NOT_PROVIDED, DeletedKey, DeletedKeyType -from hathor.nanocontracts.types import BlueprintId, TokenUid, VertexId +from hathor.nanocontracts.types import Address, Amount, BlueprintId, TokenUid, VertexId from hathor.serialization import Deserializer, Serializer from hathor.transaction.token_info import TokenDescription, TokenVersion @@ -139,7 +139,7 @@ class NCContractStorage: This implementation works for both memory and rocksdb backends.""" - def __init__(self, *, trie: PatriciaTrie, nc_id: VertexId, token_proxy: TokenProxy) -> None: + def __init__(self, *, trie: PatriciaTrie, nc_id: VertexId, block_proxy: RestrictedBlockProxy) -> None: 
# State (balances, metadata and attributes) self._trie: PatriciaTrie = trie @@ -149,15 +149,15 @@ def __init__(self, *, trie: PatriciaTrie, nc_id: VertexId, token_proxy: TokenPro # Flag to check whether any change or commit can be executed. self.is_locked = False - self._token_proxy = token_proxy + self._block_proxy = block_proxy def has_token(self, token_id: TokenUid) -> bool: """Return True if token_id exists in the current block.""" - return self._token_proxy.has_token(token_id) + return self._block_proxy.has_token(token_id) def get_token(self, token_id: TokenUid) -> TokenDescription: """Get token description for a given token ID.""" - return self._token_proxy.get_token(token_id) + return self._block_proxy.get_token(token_id) def create_token( self, @@ -168,13 +168,16 @@ def create_token( token_version: TokenVersion ) -> None: """Create a new token in the current block.""" - self._token_proxy.create_token( + self._block_proxy.create_token( token_id=token_id, token_name=token_name, token_symbol=token_symbol, token_version=token_version ) + def add_address_balance(self, address: Address, amount: Amount, token_id: TokenUid) -> None: + self._block_proxy.add_address_balance(address, amount, token_id) + def lock(self) -> None: """Lock the storage for changes or commits.""" self.is_locked = True diff --git a/hathor/nanocontracts/storage/token_proxy.py b/hathor/nanocontracts/storage/restricted_block_proxy.py similarity index 84% rename from hathor/nanocontracts/storage/token_proxy.py rename to hathor/nanocontracts/storage/restricted_block_proxy.py index 10306e10a..e76df2999 100644 --- a/hathor/nanocontracts/storage/token_proxy.py +++ b/hathor/nanocontracts/storage/restricted_block_proxy.py @@ -18,11 +18,11 @@ if TYPE_CHECKING: from hathor.nanocontracts.storage.block_storage import NCBlockStorage - from hathor.nanocontracts.types import TokenUid + from hathor.nanocontracts.types import Address, Amount, TokenUid from hathor.transaction.token_info import TokenDescription, 
TokenVersion -class TokenProxy: +class RestrictedBlockProxy: """A proxy used to limit access to only the tokens method of a block storage. """ def __init__(self, block_storage: NCBlockStorage) -> None: @@ -51,3 +51,7 @@ def create_token( token_symbol=token_symbol, token_version=token_version ) + + def add_address_balance(self, address: Address, amount: Amount, token_id: TokenUid) -> None: + """Proxy to block_storage.add_address_balance().""" + self.__block_storage.add_address_balance(address, amount, token_id) diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index 37392b12b..91e6950de 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -298,6 +298,8 @@ def get_header_from_bytes(self, buf: bytes, *, verbose: VerboseCallback = None) def get_maximum_number_of_headers(self) -> int: """Return the maximum number of headers for this vertex.""" + if self._settings.ENABLE_TRANSFER_HEADER: + return 3 return 2 @classmethod diff --git a/hathor/transaction/headers/__init__.py b/hathor/transaction/headers/__init__.py index 64efadf57..ebb96d3e2 100644 --- a/hathor/transaction/headers/__init__.py +++ b/hathor/transaction/headers/__init__.py @@ -15,6 +15,7 @@ from hathor.transaction.headers.base import VertexBaseHeader from hathor.transaction.headers.fee_header import FeeHeader from hathor.transaction.headers.nano_header import NanoHeader +from hathor.transaction.headers.transfer_header import TransferHeader from hathor.transaction.headers.types import VertexHeaderId __all__ = [ @@ -22,4 +23,5 @@ 'VertexHeaderId', 'NanoHeader', 'FeeHeader', + 'TransferHeader', ] diff --git a/hathor/transaction/headers/transfer_header.py b/hathor/transaction/headers/transfer_header.py new file mode 100644 index 000000000..e4c711059 --- /dev/null +++ b/hathor/transaction/headers/transfer_header.py @@ -0,0 +1,144 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.encoding.bytes import decode_bytes, encode_bytes +from hathor.serialization.encoding.int import decode_int, encode_int +from hathor.serialization.encoding.output_value import decode_output_value, encode_output_value +from hathor.transaction.headers.base import VertexBaseHeader +from hathor.transaction.headers.types import VertexHeaderId +from hathor.transaction.util import VerboseCallback +from hathor.types import Address, Amount, TxOutputScript + +if TYPE_CHECKING: + from hathor.transaction import Transaction + from hathor.transaction.base_transaction import BaseTransaction + + +@dataclass(slots=True, kw_only=True, frozen=True) +class TxTransferInput: + address: Address + amount: Amount + token_index: int + script: TxOutputScript + + +@dataclass(slots=True, kw_only=True, frozen=True) +class TxTransferOutput: + address: Address + amount: Amount + token_index: int + + +@dataclass(slots=True, kw_only=True) +class TransferHeader(VertexBaseHeader): + tx: Transaction + + # TODO: prevent replays. 
+ # seqnum: int + + inputs: list[TxTransferInput] + outputs: list[TxTransferOutput] + + @classmethod + def deserialize( + cls, + tx: BaseTransaction, + buf: bytes, + *, + verbose: VerboseCallback = None + ) -> tuple[TransferHeader, bytes]: + from hathor.transaction import Transaction + assert isinstance(tx, Transaction) + + deserializer = Deserializer.build_bytes_deserializer(buf) + + header_id = deserializer.read_bytes(1) + if verbose: + verbose('header_id', header_id) + assert header_id == VertexHeaderId.TRANSFER_HEADER.value + + inputs_len = decode_int(deserializer, length=1, signed=False) + if verbose: + verbose('inputs_len', inputs_len) + + inputs_: list[TxTransferInput] = [] + for _ in range(inputs_len): + address = decode_bytes(deserializer) + amount = decode_output_value(deserializer, strict=True) + token_index = decode_int(deserializer, length=1, signed=False) + script = decode_bytes(deserializer) + inputs_.append(TxTransferInput( + address=address, + amount=amount, + token_index=token_index, + script=script, + )) + + outputs_len = decode_int(deserializer, length=1, signed=False) + if verbose: + verbose('outputs_len', outputs_len) + + outputs_: list[TxTransferOutput] = [] + for _ in range(outputs_len): + address = decode_bytes(deserializer) + amount = decode_output_value(deserializer, strict=True) + token_index = decode_int(deserializer, length=1, signed=False) + outputs_.append(TxTransferOutput( + address=address, + amount=amount, + token_index=token_index, + )) + + transfer_header = TransferHeader( + tx=tx, + inputs=inputs_, + outputs=outputs_, + ) + + return transfer_header, bytes(deserializer.read_all()) + + def _serialize_without_header_id(self, serializer: Serializer, *, skip_signature: bool) -> None: + """Serialize the header with the option to skip the signature.""" + encode_int(serializer, len(self.inputs), length=1, signed=False) + for txin in self.inputs: + encode_bytes(serializer, txin.address) + encode_output_value(serializer, txin.amount, 
strict=True) + encode_int(serializer, txin.token_index, length=1, signed=False) + if not skip_signature: + encode_bytes(serializer, txin.script) + else: + encode_bytes(serializer, b'') + + encode_int(serializer, len(self.outputs), length=1, signed=False) + for txout in self.outputs: + encode_bytes(serializer, txout.address) + encode_output_value(serializer, txout.amount, strict=True) + encode_int(serializer, txout.token_index, length=1, signed=False) + + def serialize(self) -> bytes: + serializer = Serializer.build_bytes_serializer() + serializer.write_bytes(VertexHeaderId.TRANSFER_HEADER.value) + self._serialize_without_header_id(serializer, skip_signature=False) + return bytes(serializer.finalize()) + + def get_sighash_bytes(self) -> bytes: + serializer = Serializer.build_bytes_serializer() + self._serialize_without_header_id(serializer, skip_signature=True) + return bytes(serializer.finalize()) diff --git a/hathor/transaction/headers/types.py b/hathor/transaction/headers/types.py index 7b45b8a8e..8c0b0732d 100644 --- a/hathor/transaction/headers/types.py +++ b/hathor/transaction/headers/types.py @@ -19,3 +19,4 @@ class VertexHeaderId(Enum): NANO_HEADER = b'\x10' FEE_HEADER = b'\x11' + TRANSFER_HEADER = b'\x12' diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index f2336bf35..60abeff37 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -26,7 +26,7 @@ from hathor.transaction import TxInput, TxOutput, TxVersion from hathor.transaction.base_transaction import TX_HASH_SIZE, GenericVertex from hathor.transaction.exceptions import InvalidToken -from hathor.transaction.headers import NanoHeader, VertexBaseHeader +from hathor.transaction.headers import NanoHeader, TransferHeader, VertexBaseHeader from hathor.transaction.headers.fee_header import FeeHeader from hathor.transaction.static_metadata import TransactionStaticMetadata from hathor.transaction.token_info import TokenInfo, TokenInfoDict, 
TokenVersion, get_token_version @@ -125,6 +125,15 @@ def has_fees(self) -> bool: else: return True + def has_transfer_header(self) -> bool: + """Returns true if this transaction has a transfer header.""" + try: + self.get_transfer_header() + except ValueError: + return False + else: + return True + def get_nano_header(self) -> NanoHeader: """Return the NanoHeader or raise ValueError.""" return self._get_header(NanoHeader) @@ -140,6 +149,10 @@ def _get_header(self, header_type: type[T]) -> T: return header raise ValueError(f'{header_type.__name__.lower()} not found') + def get_transfer_header(self) -> TransferHeader: + """Return the TransferHeader or raise ValueError.""" + return self._get_header(TransferHeader) + @classmethod def create_from_struct(cls, struct_bytes: bytes, storage: Optional['TransactionStorage'] = None, *, verbose: VerboseCallback = None) -> Self: @@ -338,7 +351,8 @@ def get_complete_token_info(self, nc_block_storage: NCBlockStorage) -> TokenInfo token_dict = self._get_token_info_from_inputs(nc_block_storage) self._update_token_info_from_nano_actions(token_dict=token_dict, nc_block_storage=nc_block_storage) - # These must be called last so token_dict already contains all tokens in inputs and nano actions. + self._update_token_info_from_transfer_header(token_dict=token_dict) + # These must be called last so token_dict already contains all tokens in inputs, nano actions, and transfer headers.
self._update_token_info_from_outputs(token_dict=token_dict) self._update_token_info_from_fees(token_dict=token_dict) @@ -351,6 +365,28 @@ def get_minimum_number_of_inputs(self) -> int: return 0 return 1 + def _update_token_info_from_transfer_header(self, *, token_dict: TokenInfoDict) -> None: + if not self.has_transfer_header(): + return + + transfer_header = self.get_transfer_header() + + for input_ in transfer_header.inputs: + token_uid = self.get_token_uid(input_.token_index) + token_info = token_dict.get(token_uid) + if token_info is None: + raise InvalidToken('no inputs/actions for token {}'.format(token_uid.hex())) + token_info.amount += input_.amount + token_dict[token_uid] = token_info + + for output_ in transfer_header.outputs: + token_uid = self.get_token_uid(output_.token_index) + token_info = token_dict.get(token_uid) + if token_info is None: + raise InvalidToken('no inputs/actions for token {}'.format(token_uid.hex())) + token_info.amount -= output_.amount + token_dict[token_uid] = token_info + def _update_token_info_from_nano_actions( self, *, diff --git a/hathor/transaction/vertex_parser.py b/hathor/transaction/vertex_parser.py index 85850a18a..455e4f3b4 100644 --- a/hathor/transaction/vertex_parser.py +++ b/hathor/transaction/vertex_parser.py @@ -17,7 +17,7 @@ from struct import error as StructError from typing import TYPE_CHECKING, Type -from hathor.transaction.headers import FeeHeader, NanoHeader, VertexBaseHeader, VertexHeaderId +from hathor.transaction.headers import FeeHeader, NanoHeader, TransferHeader, VertexBaseHeader, VertexHeaderId if TYPE_CHECKING: from hathor.conf.settings import HathorSettings @@ -39,6 +39,8 @@ def get_supported_headers(settings: HathorSettings) -> dict[VertexHeaderId, Type supported_headers[VertexHeaderId.NANO_HEADER] = NanoHeader if settings.ENABLE_FEE_BASED_TOKENS: supported_headers[VertexHeaderId.FEE_HEADER] = FeeHeader + if settings.ENABLE_TRANSFER_HEADER: + supported_headers[VertexHeaderId.TRANSFER_HEADER] = 
TransferHeader return supported_headers @staticmethod diff --git a/hathor/verification/transfer_header_verifier.py b/hathor/verification/transfer_header_verifier.py new file mode 100644 index 000000000..61ea7f4d5 --- /dev/null +++ b/hathor/verification/transfer_header_verifier.py @@ -0,0 +1,109 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from hathor.conf.settings import HathorSettings +from hathor.nanocontracts.exception import NCInsufficientFunds, NCInvalidSignature +from hathor.nanocontracts.types import Address, TokenUid as NCTokenUid +from hathor.transaction import Transaction +from hathor.transaction.exceptions import InvalidToken, ScriptError, TooManyInputs, TooManyOutputs, TooManySigOps +from hathor.transaction.headers.nano_header import ADDRESS_LEN_BYTES +from hathor.transaction.scripts import SigopCounter, create_output_script +from hathor.transaction.scripts.execute import ScriptExtras, raw_script_eval +from hathor.transaction.scripts.opcode import OpcodesVersion +from hathor.transaction.storage import TransactionStorage +from hathor.verification.verification_params import VerificationParams + +MAX_INPUTS: int = 16 +MAX_OUTPUTS: int = 16 +MAX_SCRIPT_SIZE: int = 1024 +MAX_SCRIPT_SIGOPS_COUNT: int = 20 + + +class TransferHeaderVerifier: + __slots__ = ('_settings', '_tx_storage') + + def __init__(self, *, settings: HathorSettings, tx_storage: TransactionStorage) -> None: + 
self._settings = settings + self._tx_storage = tx_storage + + def verify_inputs_and_outputs(self, tx: Transaction) -> None: + transfer_header = tx.get_transfer_header() + if len(transfer_header.inputs) > MAX_INPUTS: + raise TooManyInputs + + if len(transfer_header.outputs) > MAX_OUTPUTS: + raise TooManyOutputs + + seen: set[tuple[Address, int]] = set() + + for input_ in transfer_header.inputs: + assert input_.amount > 0 + self._verify_signature(tx, input_.address, input_.script) + entry = (Address(input_.address), input_.token_index) + if entry in seen: + raise InvalidToken('only one token id is allowed for each address') + seen.add(entry) + + for output_ in transfer_header.outputs: + assert output_.amount > 0 + if output_.token_index > len(tx.tokens): + raise InvalidToken('token uid index not available: index {}'.format(output_.token_index)) + entry = (Address(output_.address), output_.token_index) + if entry in seen: + raise InvalidToken('only one token id is allowed for each address') + seen.add(entry) + + def _verify_signature(self, tx: Transaction, address: bytes, script: bytes) -> None: + """Verify if the caller's signature is valid.""" + if len(address) != ADDRESS_LEN_BYTES: + raise NCInvalidSignature(f'invalid address: {address.hex()}') + + if len(script) > MAX_SCRIPT_SIZE: + raise NCInvalidSignature( + f'script larger than max: {len(script)} > {MAX_SCRIPT_SIZE}' + ) + + counter = SigopCounter( + max_multisig_pubkeys=self._settings.MAX_MULTISIG_PUBKEYS, + enable_checkdatasig_count=True, + ) + output_script = create_output_script(address) + sigops_count = counter.get_sigops_count(script, output_script) + if sigops_count > MAX_SCRIPT_SIGOPS_COUNT: + raise TooManySigOps(f'sigops count greater than max: {sigops_count} > {MAX_SCRIPT_SIGOPS_COUNT}') + + try: + raw_script_eval( + input_data=script, + output_script=output_script, + extras=ScriptExtras(tx=tx, version=OpcodesVersion.V2) + ) + except ScriptError as e: + raise NCInvalidSignature from e + + def 
verify_balances(self, tx: Transaction, params: VerificationParams) -> None: + if not params.harden_nano_restrictions: + return + + transfer_header = tx.get_transfer_header() + best_block = self._tx_storage.get_best_block() + block_storage = self._tx_storage.get_nc_block_storage(best_block) + + for txin in transfer_header.inputs: + token_uid = NCTokenUid(tx.get_token_uid(txin.token_index)) + balance = block_storage.get_address_balance(Address(txin.address), token_uid) + if txin.amount > balance: + raise NCInsufficientFunds diff --git a/hathor/verification/verification_params.py b/hathor/verification/verification_params.py index e677d09f2..ba66d4775 100644 --- a/hathor/verification/verification_params.py +++ b/hathor/verification/verification_params.py @@ -51,6 +51,7 @@ def default_for_mempool(cls, *, best_block: Block, features: Features | None = N nanocontracts=True, fee_tokens=False, opcodes_version=OpcodesVersion.V2, + transfer_headers=False, ) return cls( diff --git a/hathor/verification/verification_service.py b/hathor/verification/verification_service.py index 6f5ec9476..f6f409307 100644 --- a/hathor/verification/verification_service.py +++ b/hathor/verification/verification_service.py @@ -272,6 +272,8 @@ def _verify_tx( # if this tx isn't a nano contract we assume we can find all the tokens to validate this tx allow_nonexistent_tokens=tx.is_nano_contract() ) + if tx.has_transfer_header(): + self.verifiers.transfer_header.verify_balances(tx, params) self.verifiers.vertex.verify_parents(tx) self.verifiers.tx.verify_conflict(tx, params) if params.reject_locked_reward: @@ -294,6 +296,8 @@ def verify_without_storage(self, vertex: BaseTransaction, params: VerificationPa if vertex.has_fees(): self._verify_without_storage_fee_header(vertex) + if isinstance(vertex, Transaction) and vertex.has_transfer_header() and params.features.transfer_headers: + self._verify_without_storage_transfer_header(vertex) # We assert with type() instead of isinstance() because each 
subclass has a specific branch. match vertex.version: @@ -369,6 +373,10 @@ def _verify_without_storage_fee_header(self, tx: BaseTransaction) -> None: assert isinstance(tx, Transaction) FeeHeaderVerifier.verify_fee_list(tx.get_fee_header(), tx) + def _verify_without_storage_transfer_header(self, tx: Transaction) -> None: + assert tx.has_transfer_header() + self.verifiers.transfer_header.verify_inputs_and_outputs(tx) + def _verify_without_storage_on_chain_blueprint( self, tx: OnChainBlueprint, diff --git a/hathor/verification/vertex_verifier.py b/hathor/verification/vertex_verifier.py index e8045d914..0f8ec69e5 100644 --- a/hathor/verification/vertex_verifier.py +++ b/hathor/verification/vertex_verifier.py @@ -35,7 +35,7 @@ TooManyOutputs, TooManySigOps, ) -from hathor.transaction.headers import FeeHeader, NanoHeader, VertexBaseHeader +from hathor.transaction.headers import FeeHeader, NanoHeader, TransferHeader, VertexBaseHeader from hathor.verification.verification_params import VerificationParams # tx should have 2 parents, both other transactions @@ -230,6 +230,8 @@ def get_allowed_headers(self, vertex: BaseTransaction, params: VerificationParam allowed_headers.add(NanoHeader) if params.features.fee_tokens: allowed_headers.add(FeeHeader) + if params.features.transfer_headers: + allowed_headers.add(TransferHeader) case _: # pragma: no cover assert_never(vertex.version) return allowed_headers diff --git a/hathor/verification/vertex_verifiers.py b/hathor/verification/vertex_verifiers.py index 0d1d330f1..64c423dad 100644 --- a/hathor/verification/vertex_verifiers.py +++ b/hathor/verification/vertex_verifiers.py @@ -26,6 +26,7 @@ from hathor.verification.poa_block_verifier import PoaBlockVerifier from hathor.verification.token_creation_transaction_verifier import TokenCreationTransactionVerifier from hathor.verification.transaction_verifier import TransactionVerifier +from hathor.verification.transfer_header_verifier import TransferHeaderVerifier from 
hathor.verification.vertex_verifier import VertexVerifier @@ -38,6 +39,7 @@ class VertexVerifiers(NamedTuple): tx: TransactionVerifier token_creation_tx: TokenCreationTransactionVerifier nano_header: NanoHeaderVerifier + transfer_header: TransferHeaderVerifier on_chain_blueprint: OnChainBlueprintVerifier @classmethod @@ -90,6 +92,7 @@ def create( tx_verifier = TransactionVerifier(settings=settings, daa=daa, feature_service=feature_service) token_creation_tx_verifier = TokenCreationTransactionVerifier(settings=settings) nano_header_verifier = NanoHeaderVerifier(settings=settings, tx_storage=tx_storage) + transfer_header_verifier = TransferHeaderVerifier(settings=settings, tx_storage=tx_storage) on_chain_blueprint_verifier = OnChainBlueprintVerifier(settings=settings) return VertexVerifiers( @@ -100,5 +103,6 @@ def create( tx=tx_verifier, token_creation_tx=token_creation_tx_verifier, nano_header=nano_header_verifier, + transfer_header=transfer_header_verifier, on_chain_blueprint=on_chain_blueprint_verifier, ) diff --git a/hathor_tests/nanocontracts/test_address_balance_transfer.py b/hathor_tests/nanocontracts/test_address_balance_transfer.py new file mode 100644 index 000000000..6864c9d31 --- /dev/null +++ b/hathor_tests/nanocontracts/test_address_balance_transfer.py @@ -0,0 +1,86 @@ +import pytest + +from hathor import HATHOR_TOKEN_UID, Address, Amount, Blueprint, Context, ContractId, public +from hathor.nanocontracts.exception import NCInvalidSyscall +from hathor.nanocontracts.types import TokenUid +from hathor.transaction.token_info import TokenVersion +from hathor_tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class AddressTransferBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + def transfer(self, ctx: Context, to_address: Address, amount: Amount, token: TokenUid) -> None: + self.syscall.transfer_to_address(to_address, amount, token) + + @public + def transfer_to_contract_id(self, ctx: 
Context, contract_id: ContractId, amount: Amount, token: TokenUid) -> None: + self.syscall.transfer_to_address(Address(contract_id), amount, token) + + +class TestAddressBalanceTransfer(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + self.blueprint_id = self._register_blueprint_class(AddressTransferBlueprint) + self.contract_id = self.gen_random_contract_id() + self.runner.create_contract(self.contract_id, self.blueprint_id, self.create_context()) + + def test_transfer_to_address_persists_in_block_storage(self) -> None: + destination = self.gen_random_address() + token_uid = TokenUid(HATHOR_TOKEN_UID) + self.runner.call_public_method(self.contract_id, 'transfer', self.create_context(), destination, 7, token_uid) + + balance = self.runner.block_storage.get_address_balance(destination, token_uid) + # Phase 2 expectation: this should become 7 once commit-time transfer persistence is enabled. + assert balance == 7 + + def test_transfer_to_address_rejects_contract_id(self) -> None: + destination_contract = self.gen_random_contract_id() + token_uid = TokenUid(HATHOR_TOKEN_UID) + + with pytest.raises(NCInvalidSyscall, match='address'): + self.runner.call_public_method( + self.contract_id, + 'transfer_to_contract_id', + self.create_context(), + destination_contract, + 7, + token_uid, + ) + + def test_transfer_to_address_supports_custom_token(self) -> None: + destination = self.gen_random_address() + custom_token_uid = self.gen_random_token_uid() + self.create_token(custom_token_uid, 'Custom Token', 'CTK', TokenVersion.DEPOSIT) + + self.runner.call_public_method( + self.contract_id, + 'transfer', + self.create_context(), + destination, + 7, + custom_token_uid, + ) + + custom_balance = self.runner.block_storage.get_address_balance(destination, custom_token_uid) + assert custom_balance == 7 + + htr_balance = self.runner.block_storage.get_address_balance(destination, TokenUid(HATHOR_TOKEN_UID)) + assert htr_balance == 0 + + def 
test_transfer_to_address_rejects_unknown_token(self) -> None: + destination = self.gen_random_address() + unknown_token_uid = self.gen_random_token_uid() + + with pytest.raises(NCInvalidSyscall, match='could not find'): + self.runner.call_public_method( + self.contract_id, + 'transfer', + self.create_context(), + destination, + 7, + unknown_token_uid, + ) diff --git a/hathor_tests/nanocontracts/test_consensus.py b/hathor_tests/nanocontracts/test_consensus.py index 704622112..aee351e1c 100644 --- a/hathor_tests/nanocontracts/test_consensus.py +++ b/hathor_tests/nanocontracts/test_consensus.py @@ -1,6 +1,7 @@ from typing import Any, cast from hathor.conf import HathorSettings +from hathor.conf.settings import FeatureSetting from hathor.crypto.util import get_address_from_public_key_bytes from hathor.exception import InvalidNewTransaction from hathor.nanocontracts import NC_EXECUTION_FAIL_ID, Blueprint, Context, public @@ -9,7 +10,7 @@ from hathor.nanocontracts.method import Method from hathor.nanocontracts.nc_types import make_nc_type_for_arg_type as make_nc_type from hathor.nanocontracts.storage.contract_storage import Balance -from hathor.nanocontracts.types import NCAction, NCActionType, NCDepositAction, NCWithdrawalAction, TokenUid +from hathor.nanocontracts.types import Address, NCAction, NCActionType, NCDepositAction, NCWithdrawalAction, TokenUid from hathor.nanocontracts.utils import sign_pycoin from hathor.simulator.trigger import StopAfterMinimumBalance, StopAfterNMinedBlocks from hathor.simulator.utils import add_new_blocks @@ -74,6 +75,256 @@ def fail_on_zero(self, ctx: Context) -> None: raise NCFail('counter is zero') +class AddressBalanceBlueprint(Blueprint): + token_uid: TokenUid + + @public + def initialize(self, ctx: Context, token_uid: TokenUid) -> None: + self.token_uid = token_uid + + @public + def nop(self, ctx: Context) -> None: + pass + + @public + def transfer_to_caller(self, ctx: Context, amount: int) -> None: + 
self.syscall.transfer_to_address(Address(ctx.caller_id), amount, self.token_uid) + + @public + def transfer_to_caller_and_fail(self, ctx: Context, amount: int) -> None: + self.syscall.transfer_to_address(Address(ctx.caller_id), amount, self.token_uid) + raise NCFail('forced failure') + + +class NCAddressBalanceConsensusTestCase(SimulatorTestCase): + __test__ = True + + def setUp(self) -> None: + super().setUp() + self.blueprint_id = b'z' * 32 + self.simulator.settings = self.simulator.settings._replace(ENABLE_TRANSFER_HEADER=FeatureSetting.ENABLED) + self.manager = self.simulator.create_peer() + self.manager.allow_mining_without_peers() + self.manager.tx_storage.nc_catalog = NCBlueprintCatalog({ + self.blueprint_id: AddressBalanceBlueprint, + }) + self.dag_builder = TestDAGBuilder.from_manager(self.manager) + + def _get_address_balance(self, address: bytes, *, block: Block | None = None) -> int: + if block is None: + block = self.manager.tx_storage.get_best_block() + block_storage = self.manager.get_nc_block_storage(block) + return block_storage.get_address_balance(Address(address), TokenUid(settings.HATHOR_TOKEN_UID)) + + def _assert_tx_success(self, tx: Transaction) -> None: + meta = tx.get_metadata() + assert meta.nc_execution == NCExecutionState.SUCCESS + assert meta.voided_by is None + + def _assert_tx_failure(self, tx: Transaction) -> None: + meta = tx.get_metadata() + assert meta.nc_execution == NCExecutionState.FAILURE + assert meta.voided_by == {tx.hash, NC_EXECUTION_FAIL_ID} + + def test_execution_with_transfer_header_is_successful(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..33] + b10 < dummy + + tx_init.nc_id = "{self.blueprint_id.hex()}" + tx_init.nc_method = initialize("00") + tx_init.nc_address = wallet_contract + tx_init.nc_seqnum = 0 + + tx_seed.nc_id = tx_init + tx_seed.nc_method = transfer_to_caller(9) + tx_seed.nc_address = wallet_sender + tx_seed.nc_seqnum = 0 + + tx_header.nc_id = tx_init + 
tx_header.nc_method = nop() + tx_header.nc_address = wallet_sender + tx_header.nc_seqnum = 1 + tx_header.nc_transfer_input = 4 HTR wallet_sender + tx_header.nc_transfer_output = 4 HTR wallet_receiver + + tx_init <-- b30 + tx_seed <-- b31 + tx_header <-- b32 + ''') + + artifacts.propagate_with(self.manager) + + tx_header = artifacts.get_typed_vertex('tx_header', Transaction) + b31, b32 = artifacts.get_typed_vertices(['b31', 'b32'], Block) + self._assert_tx_success(tx_header) + + transfer_header = tx_header.get_transfer_header() + sender = transfer_header.inputs[0].address + receiver = transfer_header.outputs[0].address + + assert self._get_address_balance(sender, block=b31) == 9 + assert self._get_address_balance(receiver, block=b31) == 0 + assert self._get_address_balance(sender, block=b32) == 5 + assert self._get_address_balance(receiver, block=b32) == 4 + + def test_address_balance_execution_with_transfer_from_contract_is_successful(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..32] + b10 < dummy + + tx_init.nc_id = "{self.blueprint_id.hex()}" + tx_init.nc_method = initialize("00") + tx_init.nc_address = wallet_contract + tx_init.nc_seqnum = 0 + + tx_contract.nc_id = tx_init + tx_contract.nc_method = transfer_to_caller(7) + tx_contract.nc_address = wallet_caller + tx_contract.nc_seqnum = 0 + + tx_init <-- b30 + tx_contract <-- b31 + ''') + + artifacts.propagate_with(self.manager) + + tx_contract = artifacts.get_typed_vertex('tx_contract', Transaction) + self._assert_tx_success(tx_contract) + + caller = tx_contract.get_nano_header().nc_address + assert self._get_address_balance(caller) == 7 + + def test_execution_with_both_transfer_header_and_contract_transfer_is_successful(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..33] + b10 < dummy + + tx_init.nc_id = "{self.blueprint_id.hex()}" + tx_init.nc_method = initialize("00") + tx_init.nc_address = wallet_contract + 
tx_init.nc_seqnum = 0 + + tx_seed.nc_id = tx_init + tx_seed.nc_method = transfer_to_caller(10) + tx_seed.nc_address = wallet_sender + tx_seed.nc_seqnum = 0 + + tx_combined.nc_id = tx_init + tx_combined.nc_method = transfer_to_caller(3) + tx_combined.nc_address = wallet_caller + tx_combined.nc_seqnum = 0 + tx_combined.nc_transfer_input = 4 HTR wallet_sender + tx_combined.nc_transfer_output = 4 HTR wallet_receiver + + tx_init <-- b30 + tx_seed <-- b31 + tx_combined <-- b32 + ''') + + artifacts.propagate_with(self.manager) + + tx_combined = artifacts.get_typed_vertex('tx_combined', Transaction) + self._assert_tx_success(tx_combined) + + transfer_header = tx_combined.get_transfer_header() + sender = transfer_header.inputs[0].address + receiver = transfer_header.outputs[0].address + caller = tx_combined.get_nano_header().nc_address + + assert self._get_address_balance(sender) == 6 + assert self._get_address_balance(receiver) == 4 + assert self._get_address_balance(caller) == 3 + + def test_address_balance_execution_failure_not_due_to_transfer_does_not_persist_transfers(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..33] + b10 < dummy + + tx_init.nc_id = "{self.blueprint_id.hex()}" + tx_init.nc_method = initialize("00") + tx_init.nc_address = wallet_contract + tx_init.nc_seqnum = 0 + + tx_seed.nc_id = tx_init + tx_seed.nc_method = transfer_to_caller(10) + tx_seed.nc_address = wallet_sender + tx_seed.nc_seqnum = 0 + + tx_fail.nc_id = tx_init + tx_fail.nc_method = transfer_to_caller_and_fail(3) + tx_fail.nc_address = wallet_caller + tx_fail.nc_seqnum = 0 + tx_fail.nc_transfer_input = 4 HTR wallet_sender + tx_fail.nc_transfer_output = 4 HTR wallet_receiver + + tx_init <-- b30 + tx_seed <-- b31 + tx_fail <-- b32 + ''') + + artifacts.propagate_with(self.manager) + + tx_fail = artifacts.get_typed_vertex('tx_fail', Transaction) + b31, b32 = artifacts.get_typed_vertices(['b31', 'b32'], Block) + self._assert_tx_failure(tx_fail) + + 
transfer_header = tx_fail.get_transfer_header() + sender = transfer_header.inputs[0].address + receiver = transfer_header.outputs[0].address + caller = tx_fail.get_nano_header().nc_address + + assert self._get_address_balance(sender, block=b31) == 10 + assert self._get_address_balance(receiver, block=b31) == 0 + assert self._get_address_balance(caller, block=b31) == 0 + + assert self._get_address_balance(sender, block=b32) == 10 + assert self._get_address_balance(receiver, block=b32) == 0 + assert self._get_address_balance(caller, block=b32) == 0 + + def test_address_balance_execution_failure_due_to_transfer_low_balance(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..33] + b10 < dummy + + tx_init.nc_id = "{self.blueprint_id.hex()}" + tx_init.nc_method = initialize("00") + tx_init.nc_address = wallet_contract + tx_init.nc_seqnum = 0 + + tx_seed.nc_id = tx_init + tx_seed.nc_method = transfer_to_caller(5) + tx_seed.nc_address = wallet_sender + tx_seed.nc_seqnum = 0 + + tx_low.nc_id = tx_init + tx_low.nc_method = nop() + tx_low.nc_address = wallet_sender + tx_low.nc_seqnum = 1 + tx_low.nc_transfer_input = 6 HTR wallet_sender + tx_low.nc_transfer_output = 6 HTR wallet_receiver + + tx_init <-- b30 + tx_seed <-- b31 + tx_low <-- b32 + ''') + + artifacts.propagate_with(self.manager) + + tx_low = artifacts.get_typed_vertex('tx_low', Transaction) + b31, b32 = artifacts.get_typed_vertices(['b31', 'b32'], Block) + self._assert_tx_failure(tx_low) + + transfer_header = tx_low.get_transfer_header() + sender = transfer_header.inputs[0].address + receiver = transfer_header.outputs[0].address + + assert self._get_address_balance(sender, block=b31) == 5 + assert self._get_address_balance(receiver, block=b31) == 0 + assert self._get_address_balance(sender, block=b32) == 5 + assert self._get_address_balance(receiver, block=b32) == 0 + + class NCConsensusTestCase(SimulatorTestCase): __test__ = True diff --git 
a/hathor_tests/nanocontracts/test_storage.py b/hathor_tests/nanocontracts/test_storage.py index 5f788687c..bf944761f 100644 --- a/hathor_tests/nanocontracts/test_storage.py +++ b/hathor_tests/nanocontracts/test_storage.py @@ -1,9 +1,10 @@ from typing import TypeVar +from hathor.conf.settings import HATHOR_TOKEN_UID from hathor.nanocontracts import NCRocksDBStorageFactory from hathor.nanocontracts.nc_types import NCType, NullNCType, make_nc_type_for_arg_type as make_nc_type from hathor.nanocontracts.storage import NCChangesTracker -from hathor.nanocontracts.types import Amount, ContractId, Timestamp, VertexId +from hathor.nanocontracts.types import Address, Amount, ContractId, Timestamp, TokenUid, VertexId from hathor_tests import unittest T = TypeVar('T') @@ -19,8 +20,8 @@ def setUp(self) -> None: super().setUp() rocksdb_storage = self.create_rocksdb_storage() factory = NCRocksDBStorageFactory(rocksdb_storage) - block_storage = factory.get_empty_block_storage() - self.storage = block_storage.get_empty_contract_storage(ContractId(VertexId(b''))) + self.block_storage = factory.get_empty_block_storage() + self.storage = self.block_storage.get_empty_contract_storage(ContractId(VertexId(b''))) super().setUp() def _run_test(self, data_in: T, value: NCType[T]) -> None: @@ -135,3 +136,10 @@ def test_changes_tracker_early_error(self) -> None: with self.assertRaises(TypeError): # inner string is not int changes_tracker.put_obj(b'y', nested_nc_type, {1: {'foo'}}) # type: ignore[misc] + + def test_add_address_balance_rejects_contract_id(self) -> None: + contract_id = ContractId(VertexId(b'c' * 32)) + token_uid = TokenUid(HATHOR_TOKEN_UID) + + with self.assertRaises(ValueError): + self.block_storage.add_address_balance(Address(contract_id), Amount(1), token_uid) diff --git a/hathor_tests/nanocontracts/test_syscalls_in_view.py b/hathor_tests/nanocontracts/test_syscalls_in_view.py index 440ecf29c..7d987c190 100644 --- a/hathor_tests/nanocontracts/test_syscalls_in_view.py +++ 
b/hathor_tests/nanocontracts/test_syscalls_in_view.py @@ -17,7 +17,7 @@ from hathor.nanocontracts import Blueprint, Context, public, view from hathor.nanocontracts.blueprint_env import BlueprintEnvironment from hathor.nanocontracts.exception import NCViewMethodError -from hathor.nanocontracts.types import BlueprintId, ContractId, TokenUid, VertexId +from hathor.nanocontracts.types import Address, Amount, BlueprintId, ContractId, TokenUid, VertexId from hathor_tests.nanocontracts.blueprints.unittest import BlueprintTestCase @@ -110,6 +110,10 @@ def get_proxy(self) -> None: def setup_new_contract(self) -> None: self.syscall.setup_new_contract(BlueprintId(VertexId(b'')), salt=b'') + @view + def transfer_to_address(self) -> None: + self.syscall.transfer_to_address(Address(b''), amount=Amount(0), token=TokenUid(b'')) + class IndirectSyscalls(Blueprint): other_blueprint_id: BlueprintId | None diff --git a/hathor_tests/tx/test_transfer_header.py b/hathor_tests/tx/test_transfer_header.py new file mode 100644 index 000000000..e6b853257 --- /dev/null +++ b/hathor_tests/tx/test_transfer_header.py @@ -0,0 +1,74 @@ +from hathor.feature_activation.utils import Features +from hathor.transaction import Transaction +from hathor.transaction.headers.transfer_header import TransferHeader, TxTransferInput, TxTransferOutput +from hathor_tests import unittest +from hathor_tests.dag_builder.builder import TestDAGBuilder + + +class TransferHeaderTest(unittest.TestCase): + def test_transfer_header_round_trip(self) -> None: + tx = Transaction() + tx.tokens = [b't' * 32] + + header = TransferHeader( + tx=tx, + inputs=[ + TxTransferInput( + address=b'\x01' * 25, + amount=10, + token_index=0, + script=b'\x30' * 65, + ), + ], + outputs=[ + TxTransferOutput( + address=b'\x02' * 25, + amount=10, + token_index=0, + ), + ], + ) + + serialized = header.serialize() + deserialized, remaining = TransferHeader.deserialize(tx, serialized) + assert remaining == b'' + assert deserialized.inputs == 
header.inputs + assert deserialized.outputs == header.outputs + assert deserialized.get_sighash_bytes() != serialized + + def test_regular_tx_without_transfer_header_keeps_token_info_path(self) -> None: + manager = self.create_peer('unittests') + tx = Transaction(storage=manager.tx_storage) + best_block = manager.tx_storage.get_best_block() + block_storage = manager.get_nc_block_storage(best_block) + + token_info = tx.get_complete_token_info(block_storage) + assert tx.has_transfer_header() is False + assert manager._settings.HATHOR_TOKEN_UID in token_info + + def test_dag_builder_emits_transfer_header_only_when_configured(self) -> None: + manager = self.create_peer('unittests') + dag_builder = TestDAGBuilder.from_manager(manager) + artifacts = dag_builder.build_from_str(''' + blockchain genesis b[1..5] + + tx1.out[0] = 100 HTR + tx2.out[0] = 100 HTR + tx2.nc_transfer_input = 10 HTR main + tx2.nc_transfer_output = 10 HTR main + + b1 < tx1 < tx2 < b2 + ''') + + tx1 = artifacts.get_typed_vertex('tx1', Transaction) + tx2 = artifacts.get_typed_vertex('tx2', Transaction) + assert tx1.has_transfer_header() is False + assert tx2.has_transfer_header() is True + + def test_settings_exposes_transfer_header_flag(self) -> None: + # Phase 2 expectation: this should exist after transfer-header feature flag is added. + assert hasattr(self._settings, 'ENABLE_TRANSFER_HEADER') + + def test_features_struct_exposes_transfer_headers_field(self) -> None: + # Phase 2 expectation: this should exist after transfer-header feature state is added. 
+ assert 'transfer_headers' in Features.__annotations__ diff --git a/hathor_tests/tx/test_verification_mempool.py b/hathor_tests/tx/test_verification_mempool.py index 134164d0d..8394ce1f3 100644 --- a/hathor_tests/tx/test_verification_mempool.py +++ b/hathor_tests/tx/test_verification_mempool.py @@ -3,6 +3,7 @@ from twisted.internet.defer import inlineCallbacks from hathor.checkpoint import Checkpoint +from hathor.conf.settings import FeatureSetting from hathor.exception import InvalidNewTransaction from hathor.nanocontracts import NC_EXECUTION_FAIL_ID, Blueprint, Context, fallback, public from hathor.nanocontracts.exception import ( @@ -10,6 +11,7 @@ NanoContractDoesNotExist, NCFail, NCForbiddenAction, + NCInsufficientFunds, NCInvalidMethodCall, NCInvalidSeqnum, NCMethodNotFound, @@ -66,6 +68,46 @@ def fallback(self, ctx: Context, method_name: str, nc_args: NCArgs) -> None: assert method_name == 'unknown' +class TransferHeaderMempoolValidationTest(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + self.blueprint_id = b'c' * 32 + settings = self._settings._replace(ENABLE_TRANSFER_HEADER=FeatureSetting.ENABLED) + self.manager = self.create_peer('unittests', settings=settings) + self.manager.tx_storage.nc_catalog.blueprints[self.blueprint_id] = MyTestBlueprint + self.dag_builder = TestDAGBuilder.from_manager(self.manager) + + def test_transfer_header_input_rejects_when_amount_exceeds_available_balance(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..32] + b10 < dummy + + tx_init.nc_id = "{self.blueprint_id.hex()}" + tx_init.nc_method = initialize() + tx_init.nc_address = wallet1 + tx_init.nc_seqnum = 0 + + tx_transfer.nc_id = tx_init + tx_transfer.nc_method = nop() + tx_transfer.nc_address = wallet1 + tx_transfer.nc_seqnum = 1 + tx_transfer.nc_transfer_input = 1 HTR wallet1 + tx_transfer.nc_transfer_output = 1 HTR wallet2 + + tx_init <-- b30 + b30 < tx_transfer + ''') + artifacts.propagate_with(self.manager, 
up_to='b30') + tx_transfer = artifacts.get_typed_vertex('tx_transfer', Transaction) + + tx_transfer.timestamp = int(self.manager.reactor.seconds()) + self.dag_builder._exporter._vertex_resolver(tx_transfer) + + with self.assertRaises(InvalidNewTransaction) as e: + self.manager.vertex_handler.on_new_mempool_transaction(tx_transfer) + assert isinstance(e.exception.__cause__, NCInsufficientFunds) + + class VertexHeadersTest(unittest.TestCase): def setUp(self) -> None: super().setUp() diff --git a/hathor_tests/verification/test_transfer_header_verifier.py b/hathor_tests/verification/test_transfer_header_verifier.py new file mode 100644 index 000000000..b218f8747 --- /dev/null +++ b/hathor_tests/verification/test_transfer_header_verifier.py @@ -0,0 +1,126 @@ +import dataclasses +from unittest.mock import patch + +import pytest +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ec + +from hathor.crypto.util import decode_address, get_public_key_bytes_compressed +from hathor.feature_activation.utils import Features +from hathor.nanocontracts.exception import NCInsufficientFunds +from hathor.nanocontracts.types import Address +from hathor.transaction import Transaction, TxInput, TxOutput +from hathor.transaction.exceptions import InvalidToken +from hathor.transaction.headers.transfer_header import TransferHeader, TxTransferInput, TxTransferOutput +from hathor.transaction.scripts import MultiSig +from hathor.transaction.scripts.opcode import OpcodesVersion +from hathor.transaction.scripts.p2pkh import P2PKH +from hathor.verification.transfer_header_verifier import TransferHeaderVerifier +from hathor.verification.verification_params import VerificationParams +from hathor.wallet.util import generate_multisig_address, generate_multisig_redeem_script +from hathor_tests import unittest + + +class TestTransferHeaderVerifier(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + self.manager = 
self.create_peer('unittests') + self.verifier = TransferHeaderVerifier( + settings=self.manager._settings, + tx_storage=self.manager.tx_storage, + ) + + def test_verify_inputs_and_outputs_rejects_duplicate_entries(self) -> None: + tx = Transaction(storage=self.manager.tx_storage) + tx.headers.append(TransferHeader( + tx=tx, + inputs=[], + outputs=[ + TxTransferOutput(address=b'\x01' * 25, amount=1, token_index=0), + TxTransferOutput(address=b'\x01' * 25, amount=1, token_index=0), + ], + )) + + with pytest.raises(InvalidToken, match='only one token id is allowed for each address'): + self.verifier.verify_inputs_and_outputs(tx) + + def test_verify_p2sh_input(self) -> None: + tx = Transaction(storage=self.manager.tx_storage) + privkey = ec.generate_private_key(ec.SECP256K1()) + pubkey_bytes = get_public_key_bytes_compressed(privkey.public_key()) + redeem_script = generate_multisig_redeem_script(1, [pubkey_bytes]) + multisig_address = decode_address(generate_multisig_address(redeem_script)) + signature = privkey.sign(tx.get_sighash_all_data(), ec.ECDSA(hashes.SHA256())) + multisig_input_data = MultiSig.create_input_data(redeem_script=redeem_script, signatures=[signature]) + + tx.headers.append(TransferHeader( + tx=tx, + inputs=[ + TxTransferInput( + address=multisig_address, + amount=1, + token_index=0, + script=multisig_input_data, + ), + ], + outputs=[], + )) + + # Should accept a valid multisig (P2SH) script for a multisig address. 
+ self.verifier.verify_inputs_and_outputs(tx) + + def test_verify_balances_rejects_insufficient_funds(self) -> None: + tx = Transaction(storage=self.manager.tx_storage) + tx.headers.append(TransferHeader( + tx=tx, + inputs=[ + TxTransferInput( + address=b'\x01' * 25, + amount=1, + token_index=0, + script=b'', + ), + ], + outputs=[], + )) + params = self.get_verification_params(self.manager) + + with pytest.raises(NCInsufficientFunds): + self.verifier.verify_balances(tx, params) + + def test_verification_service_invokes_transfer_header_verifier(self) -> None: + best_block = self.manager.tx_storage.get_best_block() + spent_value = best_block.outputs[0].value + tx_input = TxInput(best_block.hash, 0, b'') + address_b58 = self.get_address(0) + assert address_b58 is not None + address = decode_address(address_b58) + output = TxOutput(spent_value - 1, P2PKH.create_output_script(address)) + tx = Transaction(inputs=[tx_input], outputs=[output], storage=self.manager.tx_storage) + + tx.headers.append(TransferHeader( + tx=tx, + inputs=[], + outputs=[ + TxTransferOutput(address=bytes(Address(address)), amount=1, token_index=0), + TxTransferOutput(address=bytes(Address(address)), amount=1, token_index=0), + ], + )) + tx.update_hash() + + params = VerificationParams.default_for_mempool(best_block=self.manager.tx_storage.get_best_block()) + params = dataclasses.replace(params, features=Features( + count_checkdatasig_op=True, + nanocontracts=True, + fee_tokens=True, + opcodes_version=OpcodesVersion.V2, + transfer_headers=True, + )) + + with patch( + 'hathor.verification.transfer_header_verifier.TransferHeaderVerifier.verify_inputs_and_outputs', + autospec=True, + ) as verify_transfer: + self.manager.verification_service.verify_without_storage(tx, params) + # Phase 2 expectation: this should be called once transfer validation is wired into verification service. 
+ assert verify_transfer.called diff --git a/hathorlib/hathorlib/conf/settings.py b/hathorlib/hathorlib/conf/settings.py index cb8829247..c09a37037 100644 --- a/hathorlib/hathorlib/conf/settings.py +++ b/hathorlib/hathorlib/conf/settings.py @@ -511,6 +511,9 @@ def _validate_token_deposit_percentage(cls, token_deposit_percentage: float) -> # Used to enable opcodes V2. ENABLE_OPCODES_V2: FeatureSetting = FeatureSetting.DISABLED + # Used to enable transfer headers. + ENABLE_TRANSFER_HEADER: FeatureSetting = FeatureSetting.DISABLED + # List of enabled blueprints. BLUEPRINTS: dict[bytes, str] = {}