diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index a744ae38a..93441e169 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -147,9 +147,9 @@ jobs: os: ${{ matrix.os }} - name: Run genesis tests run: | - HATHOR_TEST_CONFIG_YAML='./hathor/conf/mainnet.yml' poetry run pytest -n0 --cov=hathor hathor_tests/tx/test_genesis.py - HATHOR_TEST_CONFIG_YAML='./hathor/conf/testnet.yml' poetry run pytest -n0 --cov=hathor --cov-append hathor_tests/tx/test_genesis.py - HATHOR_TEST_CONFIG_YAML='./hathor/conf/nano_testnet.yml' poetry run pytest -n0 --cov=hathor --cov-append hathor_tests/tx/test_genesis.py + HATHOR_TEST_CONFIG_YAML='./hathorlib/hathorlib/conf/mainnet.yml' poetry run pytest -n0 --cov=hathor hathor_tests/tx/test_genesis.py + HATHOR_TEST_CONFIG_YAML='./hathorlib/hathorlib/conf/testnet.yml' poetry run pytest -n0 --cov=hathor --cov-append hathor_tests/tx/test_genesis.py + HATHOR_TEST_CONFIG_YAML='./hathorlib/hathorlib/conf/nano_testnet.yml' poetry run pytest -n0 --cov=hathor --cov-append hathor_tests/tx/test_genesis.py - name: Run custom tests run: poetry run bash ./extras/custom_tests.sh - name: Run CI tests diff --git a/Makefile b/Makefile index c8870e1e6..ef5de105e 100644 --- a/Makefile +++ b/Makefile @@ -47,9 +47,9 @@ tests-quick: .PHONY: tests-genesis tests-genesis: - HATHOR_TEST_CONFIG_YAML='./hathor/conf/mainnet.yml' pytest -n0 hathor_tests/tx/test_genesis.py - HATHOR_TEST_CONFIG_YAML='./hathor/conf/testnet.yml' pytest -n0 hathor_tests/tx/test_genesis.py - HATHOR_TEST_CONFIG_YAML='./hathor/conf/nano_testnet.yml' pytest -n0 hathor_tests/tx/test_genesis.py + HATHOR_TEST_CONFIG_YAML='./hathorlib/hathorlib/conf/mainnet.yml' pytest -n0 hathor_tests/tx/test_genesis.py + HATHOR_TEST_CONFIG_YAML='./hathorlib/hathorlib/conf/testnet.yml' pytest -n0 hathor_tests/tx/test_genesis.py + HATHOR_TEST_CONFIG_YAML='./hathorlib/hathorlib/conf/nano_testnet.yml' pytest -n0 hathor_tests/tx/test_genesis.py .PHONY: tests-ci tests-ci: diff 
--git a/hathor/conf/__init__.py b/hathor/conf/__init__.py index 05ab4d4c1..5d4ccd33a 100644 --- a/hathor/conf/__init__.py +++ b/hathor/conf/__init__.py @@ -12,23 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from pathlib import Path - from hathor.conf.get_settings import HathorSettings -parent_dir = Path(__file__).parent - -MAINNET_SETTINGS_FILEPATH = str(parent_dir / 'mainnet.yml') -TESTNET_INDIA_SETTINGS_FILEPATH = str(parent_dir / 'testnet.yml') -NANO_TESTNET_SETTINGS_FILEPATH = str(parent_dir / 'nano_testnet.yml') -LOCALNET_SETTINGS_FILEPATH = str(parent_dir / 'localnet.yml') -UNITTESTS_SETTINGS_FILEPATH = str(parent_dir / 'unittests.yml') - __all__ = [ - 'MAINNET_SETTINGS_FILEPATH', - 'TESTNET_INDIA_SETTINGS_FILEPATH', - 'NANO_TESTNET_SETTINGS_FILEPATH', - 'LOCALNET_SETTINGS_FILEPATH', - 'UNITTESTS_SETTINGS_FILEPATH', 'HathorSettings', ] diff --git a/hathor/conf/get_settings.py b/hathor/conf/get_settings.py index fefd74c5c..b92386774 100644 --- a/hathor/conf/get_settings.py +++ b/hathor/conf/get_settings.py @@ -14,12 +14,13 @@ from __future__ import annotations -import importlib import os from typing import TYPE_CHECKING, NamedTuple, Optional from structlog import get_logger +from hathorlib.conf.utils import load_module_settings, load_yaml_settings + if TYPE_CHECKING: from hathor.conf.settings import HathorSettings as Settings @@ -29,17 +30,17 @@ class _SettingsMetadata(NamedTuple): source: str is_yaml: bool - settings: Settings + settings: 'Settings' _settings_singleton: Optional[_SettingsMetadata] = None -def get_global_settings() -> Settings: +def get_global_settings() -> 'Settings': return HathorSettings() -def HathorSettings() -> Settings: +def HathorSettings() -> 'Settings': """ Returns the configuration named tuple. 
@@ -53,7 +54,7 @@ def HathorSettings() -> Settings: if settings_module_filepath is not None: return _load_settings_singleton(settings_module_filepath, is_yaml=False) - from hathor import conf + from hathorlib import conf settings_yaml_filepath = os.environ.get('HATHOR_CONFIG_YAML', conf.MAINNET_SETTINGS_FILEPATH) return _load_settings_singleton(settings_yaml_filepath, is_yaml=True) @@ -68,7 +69,7 @@ def get_settings_source() -> str: return _settings_singleton.source -def _load_settings_singleton(source: str, *, is_yaml: bool) -> Settings: +def _load_settings_singleton(source: str, *, is_yaml: bool) -> 'Settings': global _settings_singleton if _settings_singleton is not None: @@ -79,29 +80,19 @@ def _load_settings_singleton(source: str, *, is_yaml: bool) -> Settings: return _settings_singleton.settings - settings_loader = _load_yaml_settings if is_yaml else _load_module_settings + if not is_yaml: + log = logger.new() + log.warn( + "Setting a config module via the 'HATHOR_CONFIG_FILE' env var will be deprecated soon. " + "Use the '--config-yaml' CLI option or the 'HATHOR_CONFIG_YAML' env var to set a yaml filepath instead." + ) + from hathor.conf.settings import HathorSettings as Settings + + settings_loader = load_yaml_settings if is_yaml else load_module_settings _settings_singleton = _SettingsMetadata( source=source, is_yaml=is_yaml, - settings=settings_loader(source) + settings=settings_loader(Settings, source) ) return _settings_singleton.settings - - -def _load_module_settings(module_path: str) -> Settings: - log = logger.new() - log.warn( - "Setting a config module via the 'HATHOR_CONFIG_FILE' env var will be deprecated soon. " - "Use the '--config-yaml' CLI option or the 'HATHOR_CONFIG_YAML' env var to set a yaml filepath instead." 
- ) - settings_module = importlib.import_module(module_path) - settings = getattr(settings_module, 'SETTINGS') - from hathor.conf.settings import HathorSettings as Settings - assert isinstance(settings, Settings) - return settings - - -def _load_yaml_settings(filepath: str) -> Settings: - from hathor.conf.settings import HathorSettings as Settings - return Settings.from_yaml(filepath=filepath) diff --git a/hathor/conf/mainnet.py b/hathor/conf/mainnet.py index ed4242a0e..e986304f6 100644 --- a/hathor/conf/mainnet.py +++ b/hathor/conf/mainnet.py @@ -13,10 +13,11 @@ # limitations under the License. from hathor.checkpoint import Checkpoint as cp -from hathor.conf.settings import FeatureSetting, HathorSettings +from hathor.conf.settings import HathorSettings from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.criteria import Criteria from hathor.feature_activation.settings import Settings as FeatureActivationSettings +from hathorlib.conf.settings import FeatureSetting SETTINGS = HathorSettings( P2PKH_VERSION_BYTE=b'\x28', diff --git a/hathor/conf/settings.py b/hathor/conf/settings.py index e7b5a509c..6a0ca8e47 100644 --- a/hathor/conf/settings.py +++ b/hathor/conf/settings.py @@ -11,20 +11,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Any, Union -import os -from enum import StrEnum, auto, unique -from math import log -from pathlib import Path -from typing import NamedTuple, Optional, Union - -import pydantic +from pydantic import ConfigDict, field_validator, model_validator +from typing_extensions import Self from hathor.checkpoint import Checkpoint from hathor.consensus.consensus_settings import ConsensusSettings, PowSettings from hathor.feature_activation.settings import Settings as FeatureActivationSettings -from hathor.utils import yaml -from hathor.utils.named_tuple import validated_named_tuple_from_dict +from hathorlib.conf.settings import HathorSettings as LibSettings DECIMAL_PLACES = 2 @@ -34,612 +29,50 @@ HATHOR_TOKEN_UID: bytes = b'\x00' -@unique -class FeatureSetting(StrEnum): - """Enum to configure the state of a feature.""" - - # Completely disabled. - DISABLED = auto() - - # Completely enabled since network creation. - ENABLED = auto() - - # Enabled through Feature Activation. - FEATURE_ACTIVATION = auto() - - def __bool__(self) -> bool: - """ - >>> bool(FeatureSetting.DISABLED) - False - >>> bool(FeatureSetting.ENABLED) - True - >>> bool(FeatureSetting.FEATURE_ACTIVATION) - True - """ - return self in (FeatureSetting.ENABLED, FeatureSetting.FEATURE_ACTIVATION) - - -class HathorSettings(NamedTuple): - # Version byte of the address in P2PKH - P2PKH_VERSION_BYTE: bytes - - # Version byte of the address in MultiSig - MULTISIG_VERSION_BYTE: bytes - - # Name of the network: "mainnet", "testnet-alpha", "testnet-bravo", ... 
- NETWORK_NAME: str - - # Initial bootstrap servers - BOOTSTRAP_DNS: list[str] = [] - - # enable peer whitelist - ENABLE_PEER_WHITELIST: bool = False - - # weather to use the whitelist with sync-v2 peers, does not affect whether the whitelist is enabled or not, it will - # always be enabled for sync-v1 if it is enabled - USE_PEER_WHITELIST_ON_SYNC_V2: bool = True - - DECIMAL_PLACES: int = DECIMAL_PLACES - - # Genesis pre-mined tokens - GENESIS_TOKEN_UNITS: int = GENESIS_TOKEN_UNITS - - GENESIS_TOKENS: int = GENESIS_TOKENS - - # Fee rate settings - FEE_PER_OUTPUT: int = 1 - - @property - def FEE_DIVISOR(self) -> int: - """Divisor used for evaluating fee amounts""" - result = 1 / self.TOKEN_DEPOSIT_PERCENTAGE - assert result.is_integer() - return int(result) - - # To disable reward halving, just set this to `None` and make sure that INITIAL_TOKEN_UNITS_PER_BLOCK is equal to - # MINIMUM_TOKEN_UNITS_PER_BLOCK. - BLOCKS_PER_HALVING: Optional[int] = 2 * 60 * 24 * 365 # 1051200, every 365 days - - INITIAL_TOKEN_UNITS_PER_BLOCK: int = 64 - MINIMUM_TOKEN_UNITS_PER_BLOCK: int = 8 - - @property - def INITIAL_TOKENS_PER_BLOCK(self) -> int: - return self.INITIAL_TOKEN_UNITS_PER_BLOCK * (10 ** DECIMAL_PLACES) - - @property - def MINIMUM_TOKENS_PER_BLOCK(self) -> int: - return self.MINIMUM_TOKEN_UNITS_PER_BLOCK * (10 ** DECIMAL_PLACES) - - # Assume that: amount < minimum - # But, amount = initial / (2**n), where n = number_of_halvings. Thus: - # initial / (2**n) < minimum - # initial / minimum < 2**n - # 2**n > initial / minimum - # Applying log to both sides: - # n > log2(initial / minimum) - # n > log2(initial) - log2(minimum) - @property - def MAXIMUM_NUMBER_OF_HALVINGS(self) -> int: - return int(log(self.INITIAL_TOKEN_UNITS_PER_BLOCK, 2) - log(self.MINIMUM_TOKEN_UNITS_PER_BLOCK, 2)) - - # Average time between blocks. 
- AVG_TIME_BETWEEN_BLOCKS: int = 30 # in seconds - - # Genesis pre-mined outputs - # P2PKH HMcJymyctyhnWsWTXqhP9txDwgNZaMWf42 - # - # To generate a new P2PKH script, run: - # >>> from hathor.transaction.scripts import P2PKH - # >>> import base58 - # >>> address = base58.b58decode('HMcJymyctyhnWsWTXqhP9txDwgNZaMWf42') - # >>> P2PKH.create_output_script(address=address).hex() - GENESIS_OUTPUT_SCRIPT: bytes = bytes.fromhex('76a914a584cf48b161e4a49223ed220df30037ab740e0088ac') - - # Genesis timestamps, nonces and hashes - - # Timestamp used for the genesis block - GENESIS_BLOCK_TIMESTAMP: int = 1572636343 - - @property - def GENESIS_TX1_TIMESTAMP(self) -> int: - """Timestamp used for the first genesis transaction.""" - return self.GENESIS_BLOCK_TIMESTAMP + 1 - - @property - def GENESIS_TX2_TIMESTAMP(self) -> int: - """Timestamp used for the second genesis transaction.""" - return self.GENESIS_BLOCK_TIMESTAMP + 2 - - GENESIS_BLOCK_NONCE: int = 3526202 - GENESIS_BLOCK_HASH: bytes = bytes.fromhex('000007eb968a6cdf0499e2d033faf1e163e0dc9cf41876acad4d421836972038') - GENESIS_TX1_NONCE: int = 12595 - GENESIS_TX1_HASH: bytes = bytes.fromhex('00025d75e44804a6a6a099f4320471c864b38d37b79b496ee26080a2a1fd5b7b') - GENESIS_TX2_NONCE: int = 21301 - GENESIS_TX2_HASH: bytes = bytes.fromhex('0002c187ab30d4f61c11a5dc43240bdf92dba4d19f40f1e883b0a5fdac54ef53') - - # Weight of genesis and minimum weight of a tx/block - MIN_BLOCK_WEIGHT: int = 21 - MIN_TX_WEIGHT: int = 14 - MIN_SHARE_WEIGHT: int = 21 - - HATHOR_TOKEN_UID: bytes = HATHOR_TOKEN_UID - - # Maximum distance between two consecutive blocks (in seconds), except for genesis. - # This prevent some DoS attacks exploiting the calculation of the score of a side chain. - # P(t > T) = exp(-MAX_DISTANCE_BETWEEN_BLOCKS / AVG_TIME_BETWEEN_BLOCKS) - # P(t > T) = exp(-35) = 6.3051e-16 - MAX_DISTANCE_BETWEEN_BLOCKS: int = 150 * AVG_TIME_BETWEEN_BLOCKS - - # Enable/disable weight decay. 
- WEIGHT_DECAY_ENABLED: bool = True - - # Minimum distance between two consecutive blocks that enables weight decay. - # Assuming that the hashrate is constant, the probability of activating is: - # P(t > T) = exp(-WEIGHT_DECAY_ACTIVATE_DISTANCE / AVG_TIME_BETWEEN_BLOCKS) - # P(t > T) = exp(-120) = 7.66e-53 - # But, if the hashrate drops 40 times, the expected time to find the next block - # becomes 40 * AVG_TIME_BETWEEN_BLOCKS = 20 minutes and the probability of - # activating the decay is exp(-3) = 0.05 = 5%. - WEIGHT_DECAY_ACTIVATE_DISTANCE: int = 120 * AVG_TIME_BETWEEN_BLOCKS - - # Window size of steps in which the weight is reduced when decaying is activated. - # The maximum number of steps is: - # max_steps = floor((MAX_DISTANCE_BETWEEN_BLOCKS - WEIGHT_DECAY_ACTIVATE_DISTANCE) / WEIGHT_DECAY_WINDOW_SIZE) - # Using these parameters, `max_steps = 15`. - WEIGHT_DECAY_WINDOW_SIZE: int = 60 - - # Amount to reduce the weight when decaying is activated. - # adj_weight = weight - decay - # difficulty = 2**adj_weight - # difficulty = 2**(weight - decay) - # difficulty = 2**weight / 2**decay - # As 2**(-2.73) = 0.15072, it reduces the mining difficulty for 15% of the original weight. - # Finally, the maximum decay is `max_steps * WEIGHT_DECAY_AMOUNT`. - # As `max_steps = 15`, then `max_decay = 2**(-15 * 2.73) = 4.71e-13`. - WEIGHT_DECAY_AMOUNT: float = 2.73 - - # Number of blocks to be found with the same hash algorithm as `block`. - # The bigger it is, the smaller the variance of the hash rate estimator is. 
- BLOCK_DIFFICULTY_N_BLOCKS: int = 134 - - # Size limit in bytes for Block data field - BLOCK_DATA_MAX_SIZE: int = 100 - - # Number of subfolders in the storage folder (used in JSONStorage and CompactStorage) - STORAGE_SUBFOLDERS: int = 256 - - # Maximum level of the neighborhood graph generated by graphviz - MAX_GRAPH_LEVEL: int = 3 - - # Maximum difference between our latest timestamp and a peer's synced timestamp to consider - # that the peer is synced (in seconds). - P2P_SYNC_THRESHOLD: int = 60 - - # This multiplier will be used to decide whether the fullnode has had recent activity in the p2p sync. - # This info will be used in the readiness endpoint as one of the checks. - # - # We will multiply it by the AVG_TIME_BETWEEN_BLOCKS, and compare the result with the gap between the - # current time and the latest timestamp in the database. - # - # If the gap is bigger than the calculated threshold, than we will say the fullnode is not ready (unhealthy). - # - # Using (P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER * AVG_TIME_BETWEEN_BLOCKS) as threshold will have false - # positives. - # The probability of a false positive is exp(-N), assuming the hash rate is constant during the period. - # In other words, a false positive is likely to occur every exp(N) blocks. If the hash rate decreases - # quickly, this probability gets bigger. - # - # For instance, P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER=5 would get a false positive every 90 minutes. - # For P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER=10, we would have a false positive every 8 days. - # For P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER=15, we would have a false positive every 3 years. - # - # On the other side, using higher numbers will lead to a higher delay for the fullnode to start reporting - # as not ready. - # - # So for use cases that may need more reponsiveness on this readiness check, at the cost of some eventual false - # positive, it could be a good idea to decrease the value in this setting. 
- P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER: int = 15 - - # Whether to warn the other peer of the reason for closing the connection - WHITELIST_WARN_BLOCKED_PEERS: bool = False - - # Maximum number of opened threads that are solving POW for send tokens - MAX_POW_THREADS: int = 5 - - # The error tolerance, to allow small rounding errors in Python, when comparing weights, - # accumulated weights, and scores - # How to use: - # if abs(w1 - w2) < WEIGHT_TOL: - # print('w1 and w2 are equal') - - # if w1 < w2 - WEIGHT_TOL: - # print('w1 is smaller than w2') - - # if w1 <= w2 + WEIGHT_TOL: - # print('w1 is smaller than or equal to w2') - - # if w1 > w2 + WEIGHT_TOL: - # print('w1 is greater than w2') - - # if w1 >= w2 - WEIGHT_TOL: - # print('w1 is greater than or equal to w2') - WEIGHT_TOL: float = 1e-10 - - # Maximum difference between the weight and the min_weight. - MAX_TX_WEIGHT_DIFF: float = 4.0 - MAX_TX_WEIGHT_DIFF_ACTIVATION: float = 32.0 - - # Maximum number of txs or blocks (each, not combined) to show on the dashboard - MAX_DASHBOARD_COUNT: int = 15 - - # Maximum number of txs or blocks returned by the '/transaction' endpoint - MAX_TX_COUNT: int = 15 - - # URL prefix where API is served, for instance: /v1a/status - API_VERSION_PREFIX: str = 'v1a' - - # If should use stratum to resolve pow of transactions in send tokens resource - SEND_TOKENS_STRATUM: bool = True - - # Maximum size of the tx output's script allowed by the /push-tx API. 
- PUSHTX_MAX_OUTPUT_SCRIPT_SIZE: int = 256 - - # Maximum number of subscribed addresses per websocket connection - WS_MAX_SUBS_ADDRS_CONN: Optional[int] = None - - # Maximum number of subscribed addresses that do not have any outputs (also per websocket connection) - WS_MAX_SUBS_ADDRS_EMPTY: Optional[int] = None - - # Whether miners are assumed to mine txs by default - STRATUM_MINE_TXS_DEFAULT: bool = True - - # Percentage used to calculate the number of HTR that must be deposited when minting new tokens - # The same percentage is used to calculate the number of HTR that must be withdraw when melting tokens - # See for further information, see [rfc 0011-token-deposit]. - TOKEN_DEPOSIT_PERCENTAGE: float = 0.01 - - # Array with the settings parameters that are used when calculating the settings hash - P2P_SETTINGS_HASH_FIELDS: list[str] = [ - 'P2PKH_VERSION_BYTE', - 'MULTISIG_VERSION_BYTE', - 'MIN_BLOCK_WEIGHT', - 'MIN_TX_WEIGHT', - 'BLOCK_DATA_MAX_SIZE' - ] - - # Maximum difference allowed between current time and a received tx timestamp (in seconds). Also used - # during peer connection. Peers shouldn't have their clocks more than MAX_FUTURE_TIMESTAMP_ALLOWED/2 apart - MAX_FUTURE_TIMESTAMP_ALLOWED: int = 5 * 60 - - # Multiplier for the value to increase the timestamp for the next retry moment to connect to the peer - PEER_CONNECTION_RETRY_INTERVAL_MULTIPLIER: int = 5 - - # Maximum retry interval for retrying to connect to the peer - PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL: int = 300 - - # Number max of connections in the p2p network - PEER_MAX_CONNECTIONS: int = 125 - - # Maximum period without receiving any messages from ther peer (in seconds). 
- PEER_IDLE_TIMEOUT: int = 60 - - # Maximum number of entrypoints that we accept that a peer broadcasts - PEER_MAX_ENTRYPOINTS: int = 30 - - # Filepath of ca certificate file to generate connection certificates - CA_FILEPATH: str = os.path.join(os.path.dirname(__file__), '../p2p/ca.crt') - - # Filepath of ca key file to sign connection certificates - CA_KEY_FILEPATH: str = os.path.join(os.path.dirname(__file__), '../p2p/ca.key') - - # Timeout (in seconds) for the downloading deferred (in the downloader) when syncing two peers - GET_DATA_TIMEOUT: int = 90 - - # Number of retries for downloading a tx from a peer (in the downloader) - GET_DATA_RETRIES: int = 5 - - # Maximum number of characters in a token name - MAX_LENGTH_TOKEN_NAME: int = 30 - - # Maximum number of characters in a token symbol - MAX_LENGTH_TOKEN_SYMBOL: int = 5 - - # Name of the Hathor token - HATHOR_TOKEN_NAME: str = 'Hathor' - - # Symbol of the Hathor token - HATHOR_TOKEN_SYMBOL: str = 'HTR' - - # After how many blocks can a reward be spent - REWARD_SPEND_MIN_BLOCKS: int = 300 - - # Mamimum number of inputs accepted - MAX_NUM_INPUTS: int = 255 - - # Mamimum number of outputs accepted - MAX_NUM_OUTPUTS: int = 255 - - # Maximum size of each txout's script (in bytes) - MAX_OUTPUT_SCRIPT_SIZE: int = 1024 - - # Maximum size of each txin's data (in bytes) - MAX_INPUT_DATA_SIZE: int = 1024 - - # Maximum number of pubkeys per OP_CHECKMULTISIG - MAX_MULTISIG_PUBKEYS: int = 20 - - # Maximum number of signatures per OP_CHECKMULTISIG - MAX_MULTISIG_SIGNATURES: int = 15 - - # Maximum number of sig operations of all inputs on a given tx - # including the redeemScript in case of MultiSig - MAX_TX_SIGOPS_INPUT: int = 255 * 5 - - # Maximum number of sig operations of all outputs on a given tx - MAX_TX_SIGOPS_OUTPUT: int = 255 * 5 - - # Maximum number of transactions returned on addresses history API - MAX_TX_ADDRESSES_HISTORY: int = 150 - - # Maximum number of elements (inputs and outputs) to be returned on 
address history API - # As a normal tx has ~2-4 inputs and 2 outputs, I would say the maximum should be 150*6 = 900 elements - MAX_INPUTS_OUTPUTS_ADDRESS_HISTORY: int = 6 * MAX_TX_ADDRESSES_HISTORY - - # Maximum number of TXs that will be sent by the Mempool API. - MEMPOOL_API_TX_LIMIT: int = 100 - - # Multiplier coefficient to adjust the minimum weight of a normal tx to 18 - MIN_TX_WEIGHT_COEFFICIENT: float = 1.6 - - # Amount in which tx min weight reaches the middle point between the minimum and maximum weight - MIN_TX_WEIGHT_K: int = 100 - - # Capabilities - CAPABILITY_WHITELIST: str = 'whitelist' - CAPABILITY_SYNC_VERSION: str = 'sync-version' - CAPABILITY_GET_BEST_BLOCKCHAIN: str = 'get-best-blockchain' - CAPABILITY_IPV6: str = 'ipv6' # peers announcing this capability will be relayed ipv6 entrypoints from other peers - CAPABILITY_NANO_STATE: str = 'nano-state' # indicates support for nano-state commands - - # Where to download whitelist from - WHITELIST_URL: Optional[str] = None - - # Interval (in seconds) to broadcast dashboard metrics to websocket connections - WS_SEND_METRICS_INTERVAL: int = 1 - - # Interval (in seconds) to write data to prometheus - PROMETHEUS_WRITE_INTERVAL: int = 15 - - # Interval (in seconds) to update GC data for prometheus - PROMETHEUS_UPDATE_GC_INTERVAL: int = 60 - - # Interval (in seconds) to collect metrics data - METRICS_COLLECT_DATA_INTERVAL: int = 5 - - # Interval (in seconds) to collect metrics data from rocksdb - METRICS_COLLECT_ROCKSDB_DATA_INTERVAL: int = 86400 # 1 day +class HathorSettings(LibSettings): + model_config = ConfigDict(extra='forbid') # Block checkpoints CHECKPOINTS: list[Checkpoint] = [] - # Used on testing to enable slow asserts that help catch bugs but we don't want to run in production - SLOW_ASSERTS: bool = False - - # List of soft voided transaction. - SOFT_VOIDED_TX_IDS: list[bytes] = [] - - # List of transactions to skip verification. 
- SKIP_VERIFICATION: list[bytes] = [] - - # Identifier used in metadata's voided_by to mark a tx as soft-voided. - SOFT_VOIDED_ID: bytes = b'tx-non-grata' - - # Identifier used in metadata's voided_by when an unexpected exception occurs at consensus. - CONSENSUS_FAIL_ID: bytes = b'consensus-fail' - - # Identifier used in metadata's voided_by to mark a tx as partially validated. - PARTIALLY_VALIDATED_ID: bytes = b'pending-validation' - - # Maximum number of sync running simultaneously. - MAX_ENABLED_SYNC: int = 8 + @field_validator('CHECKPOINTS', mode='before') + @classmethod + def _parse_checkpoints(cls, checkpoints: Union[dict[int, str], list[Checkpoint]]) -> list[Checkpoint]: + """Parse a dictionary of raw checkpoint data into a list of checkpoints.""" + if isinstance(checkpoints, dict): + return [ + Checkpoint(height, bytes.fromhex(_hash)) + for height, _hash in checkpoints.items() + ] - # Time to update the peers that are running sync. - SYNC_UPDATE_INTERVAL: int = 10 * 60 # seconds + if not isinstance(checkpoints, list): + raise TypeError(f'expected \'dict[int, str]\' or \'list[Checkpoint]\', got {checkpoints}') - # Interval to re-run peer discovery. - PEER_DISCOVERY_INTERVAL: int = 5 * 60 # seconds + return checkpoints # All settings related to Feature Activation FEATURE_ACTIVATION: FeatureActivationSettings = FeatureActivationSettings() - # Maximum number of GET_TIPS delayed calls per connection while running sync. - MAX_GET_TIPS_DELAYED_CALLS: int = 5 - - # Maximum number of blocks in the best blockchain list. - MAX_BEST_BLOCKCHAIN_BLOCKS: int = 20 - - # Default number of blocks in the best blockchain list. - DEFAULT_BEST_BLOCKCHAIN_BLOCKS: int = 10 - - # Time in seconds to request the best blockchain from peers. - BEST_BLOCKCHAIN_INTERVAL: int = 5 # seconds - - # Merged mining settings. The old value is going to be replaced by the new value through Feature Activation. 
- OLD_MAX_MERKLE_PATH_LENGTH: int = 12 - NEW_MAX_MERKLE_PATH_LENGTH: int = 20 - - # Maximum number of tx tips to accept in the initial phase of the mempool sync 1000 is arbitrary, but it should be - # more than enough for the forseeable future - MAX_MEMPOOL_RECEIVING_TIPS: int = 1000 - - # Max number of peers simultanously stored in the node - MAX_VERIFIED_PEERS: int = 10_000 - - # Max number of peers simultanously stored per-connection - MAX_UNVERIFIED_PEERS_PER_CONN: int = 100 - - # Used to enable nano contracts. - ENABLE_NANO_CONTRACTS: FeatureSetting = FeatureSetting.DISABLED - - # Used to enable fee-based tokens. - ENABLE_FEE_BASED_TOKENS: FeatureSetting = FeatureSetting.DISABLED - - # Used to enable opcodes V2. - ENABLE_OPCODES_V2: FeatureSetting = FeatureSetting.DISABLED - - # List of enabled blueprints. - BLUEPRINTS: dict[bytes, str] = {} + @field_validator('FEATURE_ACTIVATION', mode='before') + @classmethod + def parse_feature_activation(cls, v: dict[str, Any]) -> FeatureActivationSettings: + if isinstance(v, dict): + return FeatureActivationSettings.model_validate(v) + else: + return v # The consensus algorithm protocol settings. CONSENSUS_ALGORITHM: ConsensusSettings = PowSettings() - # The name and symbol of the native token. This is only used in APIs to serve clients. 
- NATIVE_TOKEN_NAME: str = 'Hathor' - NATIVE_TOKEN_SYMBOL: str = 'HTR' - - # The pubkeys allowed to create on-chain-blueprints in the network - # XXX: in the future this restriction will be lifted, possibly through a feature activation - NC_ON_CHAIN_BLUEPRINT_RESTRICTED: bool = True - NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES: list[str] = [] - - # Max length in bytes allowed for on-chain blueprint code after decompression, 240KB (not KiB) - NC_ON_CHAIN_BLUEPRINT_CODE_MAX_SIZE_UNCOMPRESSED: int = 240_000 - - # Max length in bytes allowed for on-chain blueprint code inside the transaction, 24KB (not KiB) - NC_ON_CHAIN_BLUEPRINT_CODE_MAX_SIZE_COMPRESSED: int = 24_000 - - # TODO: align this with a realistic value later - # fuel units are arbitrary but it's roughly the number of opcodes, memory_limit is in bytes - NC_INITIAL_FUEL_TO_LOAD_BLUEPRINT_MODULE: int = 100_000 # 100K opcodes - NC_MEMORY_LIMIT_TO_LOAD_BLUEPRINT_MODULE: int = 100 * 1024 * 1024 # 100MiB - NC_INITIAL_FUEL_TO_CALL_METHOD: int = 1_000_000 # 1M opcodes - NC_MEMORY_LIMIT_TO_CALL_METHOD: int = 1024 * 1024 * 1024 # 1GiB - - @classmethod - def from_yaml(cls, *, filepath: str) -> 'HathorSettings': - """Takes a filepath to a yaml file and returns a validated HathorSettings instance.""" - settings_dict = yaml.dict_from_extended_yaml(filepath=filepath, custom_root=Path(__file__).parent) - - return validated_named_tuple_from_dict( - HathorSettings, - settings_dict, - validators=_VALIDATORS - ) - - -def _parse_checkpoints(checkpoints: Union[dict[int, str], list[Checkpoint]]) -> list[Checkpoint]: - """Parse a dictionary of raw checkpoint data into a list of checkpoints.""" - if isinstance(checkpoints, dict): - return [ - Checkpoint(height, bytes.fromhex(_hash)) - for height, _hash in checkpoints.items() - ] - - if not isinstance(checkpoints, list): - raise TypeError(f'expected \'dict[int, str]\' or \'list[Checkpoint]\', got {checkpoints}') - - return checkpoints - - -def _parse_blueprints(blueprints_raw: dict[str, 
str]) -> dict[bytes, str]: - """Parse dict[str, str] into dict[bytes, str].""" - blueprints: dict[bytes, str] = {} - for _id_str, _name in blueprints_raw.items(): - _id = bytes.fromhex(_id_str) - if _id in blueprints: - raise TypeError(f'Duplicate blueprint id: {_id_str}') - blueprints[_id] = _name - return blueprints - - -def parse_hex_str(hex_str: Union[str, bytes]) -> bytes: - """Parse a raw hex string into bytes.""" - if isinstance(hex_str, str): - return bytes.fromhex(hex_str.lstrip('x')) - - if not isinstance(hex_str, bytes): - raise ValueError(f'expected \'str\' or \'bytes\', got {hex_str}') - - return hex_str - - -def _validate_consensus_algorithm(model: HathorSettings) -> HathorSettings: - """Validate that if Proof-of-Authority is enabled, block rewards must not be set.""" - consensus_algorithm = model.CONSENSUS_ALGORITHM - if consensus_algorithm.is_pow(): - return model - - if (model.BLOCKS_PER_HALVING is not None or - model.INITIAL_TOKEN_UNITS_PER_BLOCK != 0 or - model.MINIMUM_TOKEN_UNITS_PER_BLOCK != 0): - raise ValueError('PoA networks do not support block rewards') - return model - - -def _validate_tokens(model: HathorSettings) -> HathorSettings: - """Validate genesis tokens.""" - genesis_tokens = model.GENESIS_TOKENS - genesis_token_units = model.GENESIS_TOKEN_UNITS - decimal_places = model.DECIMAL_PLACES - - if genesis_tokens != genesis_token_units * (10 ** decimal_places): - raise ValueError( - f'invalid tokens: GENESIS_TOKENS={genesis_tokens}, ' - f'GENESIS_TOKEN_UNITS={genesis_token_units}, DECIMAL_PLACES={decimal_places}' - ) - return model - - -def _validate_token_deposit_percentage(token_deposit_percentage: float) -> float: - """Validate that TOKEN_DEPOSIT_PERCENTAGE results in an integer FEE_DIVISOR.""" - result = 1 / token_deposit_percentage - if not result.is_integer(): - raise ValueError( - f'TOKEN_DEPOSIT_PERCENTAGE must result in an integer FEE_DIVISOR. 
' - f'Got TOKEN_DEPOSIT_PERCENTAGE={token_deposit_percentage}, FEE_DIVISOR={result}' - ) - return token_deposit_percentage - - -_VALIDATORS = dict( - _parse_hex_str=pydantic.field_validator( - 'P2PKH_VERSION_BYTE', - 'MULTISIG_VERSION_BYTE', - 'GENESIS_OUTPUT_SCRIPT', - 'GENESIS_BLOCK_HASH', - 'GENESIS_TX1_HASH', - 'GENESIS_TX2_HASH', - mode='before', - )(parse_hex_str), - _parse_soft_voided_tx_id=pydantic.field_validator( - 'SOFT_VOIDED_TX_IDS', - mode='before', - )(lambda v: [parse_hex_str(x) for x in v] if isinstance(v, list) else v), - _parse_skipped_verification_tx_id=pydantic.field_validator( - 'SKIP_VERIFICATION', - mode='before', - )(lambda v: [parse_hex_str(x) for x in v] if isinstance(v, list) else v), - _parse_checkpoints=pydantic.field_validator( - 'CHECKPOINTS', - mode='before', - )(_parse_checkpoints), - _parse_blueprints=pydantic.field_validator( - 'BLUEPRINTS', - mode='before', - )(_parse_blueprints), - _validate_consensus_algorithm=pydantic.model_validator( - mode='after', - )(_validate_consensus_algorithm), - _validate_tokens=pydantic.model_validator( - mode='after', - )(_validate_tokens), - _validate_token_deposit_percentage=pydantic.field_validator( - 'TOKEN_DEPOSIT_PERCENTAGE', - mode='after', - )(_validate_token_deposit_percentage), - _parse_feature_activation=pydantic.field_validator( - 'FEATURE_ACTIVATION', - mode='before', - )(lambda v: FeatureActivationSettings.model_validate(v) if isinstance(v, dict) else v), -) + @model_validator(mode='after') + def _validate_consensus_algorithm(self) -> Self: + """Validate that if Proof-of-Authority is enabled, block rewards must not be set.""" + consensus_algorithm = self.CONSENSUS_ALGORITHM + if consensus_algorithm.is_pow(): + return self + + if (self.BLOCKS_PER_HALVING is not None or + self.INITIAL_TOKEN_UNITS_PER_BLOCK != 0 or + self.MINIMUM_TOKEN_UNITS_PER_BLOCK != 0): + raise ValueError('PoA networks do not support block rewards') + return self diff --git a/hathor/feature_activation/utils.py 
b/hathor/feature_activation/utils.py index 774707bac..47a01235b 100644 --- a/hathor/feature_activation/utils.py +++ b/hathor/feature_activation/utils.py @@ -22,9 +22,10 @@ from hathor.transaction.scripts.opcode import OpcodesVersion if TYPE_CHECKING: - from hathor.conf.settings import FeatureSetting, HathorSettings + from hathor.conf.settings import HathorSettings from hathor.feature_activation.feature_service import FeatureService from hathor.transaction import Vertex + from hathorlib.conf.settings import FeatureSetting @dataclass(slots=True, frozen=True, kw_only=True) @@ -39,7 +40,7 @@ class Features: @staticmethod def from_vertex(*, settings: HathorSettings, feature_service: FeatureService, vertex: Vertex) -> Features: """Return whether the Nano Contracts feature is active according to the provided settings and vertex.""" - from hathor.conf.settings import FeatureSetting + from hathorlib.conf.settings import FeatureSetting feature_states = feature_service.get_feature_states(vertex=vertex) feature_settings = { Feature.COUNT_CHECKDATASIG_OP: FeatureSetting.FEATURE_ACTIVATION, @@ -65,7 +66,7 @@ def from_vertex(*, settings: HathorSettings, feature_service: FeatureService, ve def _is_feature_active(setting: FeatureSetting, state: FeatureState) -> bool: """Return whether a feature is active based on the setting and state.""" - from hathor.conf.settings import FeatureSetting + from hathorlib.conf.settings import FeatureSetting match setting: case FeatureSetting.DISABLED: return False diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index 44f655935..5dae73a9f 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -56,7 +56,9 @@ def __init__(self, seed: Optional[int] = None): seed = secrets.randbits(64) self.seed = seed self.rng = Random(self.seed) - self.settings = get_global_settings()._replace(AVG_TIME_BETWEEN_BLOCKS=SIMULATOR_AVG_TIME_BETWEEN_BLOCKS) + self.settings = get_global_settings().model_copy( + 
update={"AVG_TIME_BETWEEN_BLOCKS": SIMULATOR_AVG_TIME_BETWEEN_BLOCKS} + ) self._clock = MemoryReactorHeapClock() self._peers: OrderedDict[str, HathorManager] = OrderedDict() self._connections: list['FakeConnection'] = [] diff --git a/hathor/utils/named_tuple.py b/hathor/utils/named_tuple.py deleted file mode 100644 index 6adb85744..000000000 --- a/hathor/utils/named_tuple.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2023 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Any, NamedTuple, TypeVar, get_type_hints - -from pydantic import TypeAdapter, create_model - -from hathor.utils.pydantic import BaseModel - -T = TypeVar('T', bound=NamedTuple) - - -def validated_named_tuple_from_dict( - named_tuple_type: type[T], - attributes_dict: dict[str, Any], - *, - validators: dict[str, Any] | None = None -) -> T: - """ - Takes an attributes dict and returns a validated instance of the specified NamedTuple subclass. - Performs validation using pydantic. 
- - Args: - named_tuple_type: the NamedTuple subclass to create an instance from - attributes_dict: a dict with all required attributes for the NamedTuple subclass - validators: custom pydantic field_validators (dict of name -> decorated validator) - - Returns: a validated instance of the specified NamedTuple subclass - """ - if not validators: - # Simple case: use TypeAdapter directly (Pydantic v2 native NamedTuple support) - adapter = TypeAdapter(named_tuple_type) - return adapter.validate_python(attributes_dict) - - # Complex case with validators: create a dynamic model - type_hints = get_type_hints(named_tuple_type) - defaults = getattr(named_tuple_type, '_field_defaults', {}) - - field_definitions: dict[str, Any] = { - name: (hint, defaults.get(name, ...)) - for name, hint in type_hints.items() - } - - model = create_model( - f'{named_tuple_type.__name__}Model', - __base__=BaseModel, - __validators__=validators, - **field_definitions - ) - - # Fill in defaults via intermediate NamedTuple, then validate - all_attributes = named_tuple_type(**attributes_dict) # type: ignore[call-overload] - validated = model.model_validate(all_attributes._asdict()) - - # Use dict comprehension to get validated attributes directly from the model - # instead of model_dump() which would convert nested Pydantic models to dicts - validated_dict = {name: getattr(validated, name) for name in type_hints} - return named_tuple_type(**validated_dict) # type: ignore[call-overload] diff --git a/hathor_cli/check_blueprint.py b/hathor_cli/check_blueprint.py index 4a7963b3d..475bc5fc9 100644 --- a/hathor_cli/check_blueprint.py +++ b/hathor_cli/check_blueprint.py @@ -19,12 +19,12 @@ def main() -> None: - from hathor_cli.util import create_parser - from hathor.conf import NANO_TESTNET_SETTINGS_FILEPATH from hathor.conf.get_settings import get_global_settings from hathor.nanocontracts import OnChainBlueprint from hathor.nanocontracts.on_chain_blueprint import Code from 
hathor.verification.on_chain_blueprint_verifier import OnChainBlueprintVerifier + from hathor_cli.util import create_parser + from hathorlib.conf import NANO_TESTNET_SETTINGS_FILEPATH os.environ['HATHOR_CONFIG_YAML'] = NANO_TESTNET_SETTINGS_FILEPATH diff --git a/hathor_cli/events_simulator/events_simulator.py b/hathor_cli/events_simulator/events_simulator.py index 3ba629d91..e5f53ae5c 100644 --- a/hathor_cli/events_simulator/events_simulator.py +++ b/hathor_cli/events_simulator/events_simulator.py @@ -44,7 +44,7 @@ def create_parser() -> ArgumentParser: def execute(args: Namespace, reactor: 'ReactorProtocol') -> None: - from hathor.conf import UNITTESTS_SETTINGS_FILEPATH + from hathorlib.conf import UNITTESTS_SETTINGS_FILEPATH os.environ['HATHOR_CONFIG_YAML'] = UNITTESTS_SETTINGS_FILEPATH from hathor_cli.events_simulator.event_forwarding_websocket_factory import EventForwardingWebsocketFactory from hathor_cli.events_simulator.scenario import Scenario @@ -57,7 +57,9 @@ def execute(args: Namespace, reactor: 'ReactorProtocol') -> None: possible_scenarios = [scenario.name for scenario in Scenario] raise ValueError(f'Invalid scenario "{args.scenario}". 
Choose one of {possible_scenarios}') from e - settings = get_global_settings()._replace(REWARD_SPEND_MIN_BLOCKS=scenario.get_reward_spend_min_blocks()) + settings = get_global_settings().model_copy( + update={"REWARD_SPEND_MIN_BLOCKS": scenario.get_reward_spend_min_blocks()} + ) log = logger.new() simulator = Simulator(args.seed) simulator.start() diff --git a/hathor_cli/run_node.py b/hathor_cli/run_node.py index 2e5b781b8..ddef45847 100644 --- a/hathor_cli/run_node.py +++ b/hathor_cli/run_node.py @@ -499,7 +499,7 @@ def check_python_version(self) -> None: ])) def __init__(self, *, argv=None): - from hathor.conf import ( + from hathorlib.conf import ( LOCALNET_SETTINGS_FILEPATH, NANO_TESTNET_SETTINGS_FILEPATH, TESTNET_INDIA_SETTINGS_FILEPATH, diff --git a/hathor_tests/conftest.py b/hathor_tests/conftest.py index be711c139..f9cb2083b 100644 --- a/hathor_tests/conftest.py +++ b/hathor_tests/conftest.py @@ -1,7 +1,7 @@ import os -from hathor.conf import UNITTESTS_SETTINGS_FILEPATH from hathor.reactor import initialize_global_reactor +from hathorlib.conf import UNITTESTS_SETTINGS_FILEPATH os.environ['HATHOR_CONFIG_YAML'] = os.environ.get('HATHOR_TEST_CONFIG_YAML', UNITTESTS_SETTINGS_FILEPATH) diff --git a/hathor_tests/consensus/test_consensus6.py b/hathor_tests/consensus/test_consensus6.py index 16dd62004..8fcd52a83 100644 --- a/hathor_tests/consensus/test_consensus6.py +++ b/hathor_tests/consensus/test_consensus6.py @@ -24,7 +24,7 @@ class TestConsensus6(unittest.TestCase): def setUp(self) -> None: super().setUp() - settings = self._settings._replace(REWARD_SPEND_MIN_BLOCKS=1) # for simplicity + settings = self._settings.model_copy(update={'REWARD_SPEND_MIN_BLOCKS': 1}) # for simplicity daa = DifficultyAdjustmentAlgorithm(settings=settings, test_mode=TestMode.TEST_ALL_WEIGHT) builder = self.get_builder(settings).set_daa(daa) diff --git a/hathor_tests/event/event_simulation_tester.py b/hathor_tests/event/event_simulation_tester.py index 172a79fac..8e0fffd26 100644 --- 
a/hathor_tests/event/event_simulation_tester.py +++ b/hathor_tests/event/event_simulation_tester.py @@ -34,10 +34,16 @@ def setUp(self) -> None: def _prepare(self, reward_spend_min_blocks: int) -> None: peer = PrivatePeer.auto_generated() - builder = self.simulator.get_default_builder() \ - .set_peer(peer) \ - .enable_event_queue() \ - .set_settings(self._settings._replace(REWARD_SPEND_MIN_BLOCKS=reward_spend_min_blocks)) + builder = ( + self.simulator.get_default_builder() + .set_peer(peer) + .enable_event_queue() + .set_settings( + self._settings.model_copy( + update={"REWARD_SPEND_MIN_BLOCKS": reward_spend_min_blocks} + ) + ) + ) artifacts = self.simulator.create_artifacts(builder) self.peer_id: str = str(peer.id) diff --git a/hathor_tests/feature_activation/test_bit_signaling_service.py b/hathor_tests/feature_activation/test_bit_signaling_service.py index 5f41ff01a..f5f63855b 100644 --- a/hathor_tests/feature_activation/test_bit_signaling_service.py +++ b/hathor_tests/feature_activation/test_bit_signaling_service.py @@ -16,7 +16,6 @@ import pytest -from hathor.conf.settings import HathorSettings from hathor.feature_activation.bit_signaling_service import BitSignalingService from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService @@ -166,7 +165,7 @@ def _test_generate_signal_bits( support_features: set[Feature], not_support_features: set[Feature] ) -> int: - settings = Mock(spec_set=HathorSettings) + settings = Mock() settings.FEATURE_ACTIVATION = FeatureSettings() feature_service = Mock(spec_set=FeatureService) feature_service.get_feature_infos = lambda vertex: feature_infos @@ -255,7 +254,7 @@ def test_non_signaling_features_warning( not_support_features: set[Feature], non_signaling_features: set[str], ) -> None: - settings = Mock(spec_set=HathorSettings) + settings = Mock() settings.FEATURE_ACTIVATION = FeatureSettings() best_block = Mock(spec_set=Block) diff --git 
a/hathor_tests/feature_activation/test_feature_service.py b/hathor_tests/feature_activation/test_feature_service.py index e18e39588..549906042 100644 --- a/hathor_tests/feature_activation/test_feature_service.py +++ b/hathor_tests/feature_activation/test_feature_service.py @@ -95,7 +95,7 @@ def get_settings(*, features: dict[Feature, Criteria]) -> HathorSettings: default_threshold=3, features=features, ) - settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings) + settings = get_global_settings().model_copy(update={'FEATURE_ACTIVATION': feature_settings}) return settings @@ -223,7 +223,7 @@ def test_get_state_from_started_to_locked_in_on_default_threshold( ) } ) - settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings) + settings = get_global_settings().model_copy(update={'FEATURE_ACTIVATION': feature_settings}) storage = get_storage(settings, up_to_height=block_height) service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() @@ -647,7 +647,7 @@ def test_check_must_signal( ) } ) - settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings) + settings = get_global_settings().model_copy(update={'FEATURE_ACTIVATION': feature_settings}) storage = get_storage(settings, up_to_height=block_height) service = FeatureService(settings=settings, tx_storage=storage) service.bit_signaling_service = Mock() diff --git a/hathor_tests/feature_activation/test_feature_simulation.py b/hathor_tests/feature_activation/test_feature_simulation.py index f5cd0ddfd..ff5e3e255 100644 --- a/hathor_tests/feature_activation/test_feature_simulation.py +++ b/hathor_tests/feature_activation/test_feature_simulation.py @@ -77,7 +77,12 @@ def test_feature(self) -> None: } ) - settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings, REWARD_SPEND_MIN_BLOCKS=0) + settings = get_global_settings().model_copy( + update={ + "FEATURE_ACTIVATION": feature_settings, + 
"REWARD_SPEND_MIN_BLOCKS": 0, + } + ) self.simulator.settings = settings builder = self.get_simulator_builder().set_settings(settings) artifacts = self.simulator.create_artifacts(builder) @@ -468,7 +473,7 @@ def test_reorg(self) -> None: } ) - settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings) + settings = get_global_settings().model_copy(update={'FEATURE_ACTIVATION': feature_settings}) builder = self.get_simulator_builder().set_settings(settings) artifacts = self.simulator.create_artifacts(builder) feature_service = artifacts.feature_service @@ -701,7 +706,7 @@ def test_feature_from_existing_storage(self) -> None: } ) - settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings) + settings = get_global_settings().model_copy(update={'FEATURE_ACTIVATION': feature_settings}) rocksdb_dir = self.get_rocksdb_directory() builder1 = self.get_simulator_builder_from_dir(rocksdb_dir).set_settings(settings) artifacts1 = self.simulator.create_artifacts(builder1) diff --git a/hathor_tests/feature_activation/test_mining_simulation.py b/hathor_tests/feature_activation/test_mining_simulation.py index 734ba70cf..f7dde7630 100644 --- a/hathor_tests/feature_activation/test_mining_simulation.py +++ b/hathor_tests/feature_activation/test_mining_simulation.py @@ -20,7 +20,6 @@ from twisted.internet.testing import StringTransport from hathor.conf import HathorSettings as get_settings -from hathor.conf.settings import HathorSettings from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.criteria import Criteria from hathor.feature_activation.settings import Settings as FeatureSettings @@ -35,9 +34,8 @@ class MiningSimulationTest(SimulatorTestCase): def test_signal_bits_in_mining(self) -> None: - settings_dict = get_settings()._asdict() - settings_dict.update( - FEATURE_ACTIVATION=FeatureSettings( + settings = get_settings().model_copy(update={ + 'FEATURE_ACTIVATION': FeatureSettings( evaluation_interval=4, 
default_threshold=3, features={ @@ -56,8 +54,7 @@ def test_signal_bits_in_mining(self) -> None: ), } ) - ) - settings = HathorSettings(**settings_dict) + }) builder = self.simulator.get_default_builder() \ .set_settings(settings) \ diff --git a/hathor_tests/nanocontracts/test_emit_event_payload.py b/hathor_tests/nanocontracts/test_emit_event_payload.py index bd7d1af2a..b2183ff1d 100644 --- a/hathor_tests/nanocontracts/test_emit_event_payload.py +++ b/hathor_tests/nanocontracts/test_emit_event_payload.py @@ -30,7 +30,7 @@ def initialize(self, ctx: Context) -> None: class EmitEventPayloadTestCase(BlueprintTestCase): def build_manager(self) -> HathorManager: # Lower reward spend requirement to avoid reward-lock interference in this focused test. - settings = self._settings._replace(REWARD_SPEND_MIN_BLOCKS=1) + settings = self._settings.model_copy(update={"REWARD_SPEND_MIN_BLOCKS": 1}) return self.create_peer( 'unittests', nc_indexes=True, diff --git a/hathor_tests/nanocontracts/test_feature_activations.py b/hathor_tests/nanocontracts/test_feature_activations.py index 49abb05f6..00716249c 100644 --- a/hathor_tests/nanocontracts/test_feature_activations.py +++ b/hathor_tests/nanocontracts/test_feature_activations.py @@ -14,7 +14,6 @@ import pytest -from hathor.conf.settings import FeatureSetting from hathor.crypto.util import decode_address, get_address_from_public_key_hash from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode from hathor.exception import InvalidNewTransaction @@ -29,6 +28,7 @@ from hathor.transaction.scripts import P2PKH, Opcode from hathor_tests import unittest from hathor_tests.dag_builder.builder import TestDAGBuilder +from hathorlib.conf.settings import FeatureSetting class MyBluprint(Blueprint): @@ -75,12 +75,12 @@ def setUp(self) -> None: } ) - settings = self._settings._replace( - ENABLE_NANO_CONTRACTS=FeatureSetting.FEATURE_ACTIVATION, - ENABLE_FEE_BASED_TOKENS=FeatureSetting.FEATURE_ACTIVATION, - 
ENABLE_OPCODES_V2=FeatureSetting.FEATURE_ACTIVATION, - FEATURE_ACTIVATION=feature_settings, - ) + settings = self._settings.model_copy(update={ + 'ENABLE_NANO_CONTRACTS': FeatureSetting.FEATURE_ACTIVATION, + 'ENABLE_FEE_BASED_TOKENS': FeatureSetting.FEATURE_ACTIVATION, + 'ENABLE_OPCODES_V2': FeatureSetting.FEATURE_ACTIVATION, + 'FEATURE_ACTIVATION': feature_settings, + }) daa = DifficultyAdjustmentAlgorithm(settings=self._settings, test_mode=TestMode.TEST_ALL_WEIGHT) builder = self.get_builder(settings).set_daa(daa) diff --git a/hathor_tests/nanocontracts/test_nc_exec_logs.py b/hathor_tests/nanocontracts/test_nc_exec_logs.py index 0c09ffcb4..7b6dfb386 100644 --- a/hathor_tests/nanocontracts/test_nc_exec_logs.py +++ b/hathor_tests/nanocontracts/test_nc_exec_logs.py @@ -108,9 +108,9 @@ def _get_initialize_entries(self, tx: Transaction) -> list[NCCallBeginEntry | NC ] def _prepare(self, nc_log_config: NCLogConfig = NCLogConfig.ALL) -> None: - settings = self._settings._replace( - REWARD_SPEND_MIN_BLOCKS=1, # to make tests quicker - ) + settings = self._settings.model_copy(update={ + 'REWARD_SPEND_MIN_BLOCKS': 1, # to make tests quicker + }) artifacts = self.get_builder() \ .set_settings(settings) \ .set_nc_log_config(nc_log_config) \ diff --git a/hathor_tests/nanocontracts/test_restricted_ocb.py b/hathor_tests/nanocontracts/test_restricted_ocb.py index cd167238a..47292cc7c 100644 --- a/hathor_tests/nanocontracts/test_restricted_ocb.py +++ b/hathor_tests/nanocontracts/test_restricted_ocb.py @@ -89,7 +89,7 @@ def test_ocb_address_not_allowed(self) -> None: def test_ocb_unrestricted(self) -> None: builder = self.get_builder() \ - .set_settings(self._settings._replace(NC_ON_CHAIN_BLUEPRINT_RESTRICTED=False)) + .set_settings(self._settings.model_copy(update={'NC_ON_CHAIN_BLUEPRINT_RESTRICTED': False})) manager = self.create_peer_from_builder(builder) dag_builder = TestDAGBuilder.from_manager(manager) password = b'abc' @@ -124,7 +124,7 @@ def test_ocb_unrestricted(self) -> 
None: def test_ocb_invalid_pubkey(self) -> None: builder = self.get_builder() \ - .set_settings(self._settings._replace(NC_ON_CHAIN_BLUEPRINT_RESTRICTED=False)) + .set_settings(self._settings.model_copy(update={'NC_ON_CHAIN_BLUEPRINT_RESTRICTED': False})) manager = self.create_peer_from_builder(builder) dag_builder = TestDAGBuilder.from_manager(manager) private_key = unittest.OCB_TEST_PRIVKEY.hex() diff --git a/hathor_tests/others/test_bfs_regression.py b/hathor_tests/others/test_bfs_regression.py index 783b5a441..e1aea46ea 100644 --- a/hathor_tests/others/test_bfs_regression.py +++ b/hathor_tests/others/test_bfs_regression.py @@ -21,7 +21,7 @@ class TestBfsRegression(unittest.TestCase): def setUp(self) -> None: super().setUp() - settings = self._settings._replace(REWARD_SPEND_MIN_BLOCKS=1) # for simplicity + settings = self._settings.model_copy(update={'REWARD_SPEND_MIN_BLOCKS': 1}) # for simplicity daa = DifficultyAdjustmentAlgorithm(settings=settings, test_mode=TestMode.TEST_ALL_WEIGHT) builder = self.get_builder(settings).set_daa(daa) self.manager = self.create_peer_from_builder(builder) diff --git a/hathor_tests/others/test_hathor_settings.py b/hathor_tests/others/test_hathor_settings.py index 9b22dcb06..6fb718dc4 100644 --- a/hathor_tests/others/test_hathor_settings.py +++ b/hathor_tests/others/test_hathor_settings.py @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- from pathlib import Path from typing import Any from unittest.mock import Mock, patch @@ -20,9 +19,10 @@ from pydantic import ValidationError from hathor.checkpoint import Checkpoint -from hathor.conf import MAINNET_SETTINGS_FILEPATH from hathor.conf.mainnet import SETTINGS as MAINNET_SETTINGS from hathor.conf.settings import DECIMAL_PLACES, GENESIS_TOKEN_UNITS, GENESIS_TOKENS, HathorSettings +from hathorlib.conf import MAINNET_SETTINGS_FILEPATH +from hathorlib.conf.utils import load_yaml_settings @pytest.mark.parametrize('filepath', ['fixtures/valid_hathor_settings_fixture.yml']) @@ -66,7 +66,7 @@ def test_valid_hathor_settings_from_yaml(filepath): BLOCK_DIFFICULTY_N_BLOCKS=20, ) - assert expected_hathor_settings == HathorSettings.from_yaml(filepath=settings_filepath) + assert expected_hathor_settings == load_yaml_settings(HathorSettings, filepath=settings_filepath) @pytest.mark.parametrize( @@ -84,7 +84,7 @@ def test_invalid_hathor_settings_from_yaml(filepath, error): settings_filepath = str(parent_dir / filepath) with pytest.raises(ValidationError) as e: - HathorSettings.from_yaml(filepath=settings_filepath) + load_yaml_settings(HathorSettings, filepath=settings_filepath) errors = e.value.errors() assert errors[0]['msg'] == error @@ -95,58 +95,58 @@ def test_missing_hathor_settings_from_yaml(filepath): parent_dir = Path(__file__).parent settings_filepath = str(parent_dir / filepath) - with pytest.raises(TypeError) as e: - HathorSettings.from_yaml(filepath=settings_filepath) + with pytest.raises(ValidationError) as e: + load_yaml_settings(HathorSettings, filepath=settings_filepath) - assert "missing 1 required positional argument: 'NETWORK_NAME'" in str(e.value) + assert "validation error for HathorSettings\nNETWORK_NAME" in str(e.value) def test_tokens() -> None: yaml_mock = Mock() required_settings = dict(P2PKH_VERSION_BYTE='x01', MULTISIG_VERSION_BYTE='x02', NETWORK_NAME='test') - def mock_settings(settings_: dict[str, Any]) -> None: - 
yaml_mock.dict_from_extended_yaml = Mock(return_value=required_settings | settings_) + def mock_settings(mock: Mock, settings_: dict[str, Any]) -> None: + mock.return_value = required_settings | settings_ - with patch('hathor.conf.settings.yaml', yaml_mock): + with patch('hathorlib.utils.yaml.dict_from_extended_yaml', yaml_mock): # Test default values passes - mock_settings(dict( + mock_settings(yaml_mock, dict( GENESIS_TOKENS=GENESIS_TOKENS, GENESIS_TOKEN_UNITS=GENESIS_TOKEN_UNITS, DECIMAL_PLACES=DECIMAL_PLACES, )) - HathorSettings.from_yaml(filepath='some_path') + load_yaml_settings(HathorSettings, filepath='some_path') # Test failures - mock_settings(dict( + mock_settings(yaml_mock, dict( GENESIS_TOKENS=GENESIS_TOKENS + 1, GENESIS_TOKEN_UNITS=GENESIS_TOKEN_UNITS, DECIMAL_PLACES=DECIMAL_PLACES, )) with pytest.raises(ValidationError) as e: - HathorSettings.from_yaml(filepath='some_path') + load_yaml_settings(HathorSettings, filepath='some_path') assert ( 'invalid tokens: GENESIS_TOKENS=100000000001, GENESIS_TOKEN_UNITS=1000000000, DECIMAL_PLACES=2' ) in str(e.value) - mock_settings(dict( + mock_settings(yaml_mock, dict( GENESIS_TOKENS=GENESIS_TOKENS, GENESIS_TOKEN_UNITS=GENESIS_TOKEN_UNITS + 1, DECIMAL_PLACES=DECIMAL_PLACES, )) with pytest.raises(ValidationError) as e: - HathorSettings.from_yaml(filepath='some_path') + load_yaml_settings(HathorSettings, filepath='some_path') assert ( 'invalid tokens: GENESIS_TOKENS=100000000000, GENESIS_TOKEN_UNITS=1000000001, DECIMAL_PLACES=2' ) in str(e.value) - mock_settings(dict( + mock_settings(yaml_mock, dict( GENESIS_TOKENS=GENESIS_TOKENS, GENESIS_TOKEN_UNITS=GENESIS_TOKEN_UNITS, DECIMAL_PLACES=DECIMAL_PLACES + 1, )) with pytest.raises(ValidationError) as e: - HathorSettings.from_yaml(filepath='some_path') + load_yaml_settings(HathorSettings, filepath='some_path') assert ( 'invalid tokens: GENESIS_TOKENS=100000000000, GENESIS_TOKEN_UNITS=1000000000, DECIMAL_PLACES=3' ) in str(e.value) @@ -156,103 +156,104 @@ def 
test_token_deposit_percentage() -> None: yaml_mock = Mock() required_settings = dict(P2PKH_VERSION_BYTE='x01', MULTISIG_VERSION_BYTE='x02', NETWORK_NAME='test') - def mock_settings(settings_: dict[str, Any]) -> None: - yaml_mock.dict_from_extended_yaml = Mock(return_value=required_settings | settings_) + def mock_settings(mock: Mock, settings_: dict[str, Any]) -> None: + mock.return_value = required_settings | settings_ - with patch('hathor.conf.settings.yaml', yaml_mock): + with patch('hathorlib.utils.yaml.dict_from_extended_yaml', yaml_mock): # Test default value passes (0.01 results in FEE_DIVISOR=100) - mock_settings(dict(TOKEN_DEPOSIT_PERCENTAGE=0.01)) - HathorSettings.from_yaml(filepath='some_path') + mock_settings(yaml_mock, dict(TOKEN_DEPOSIT_PERCENTAGE=0.01)) + load_yaml_settings(HathorSettings, filepath='some_path') # Test fails when TOKEN_DEPOSIT_PERCENTAGE results in non-integer FEE_DIVISOR (0.03 -> 33.333...) - mock_settings(dict(TOKEN_DEPOSIT_PERCENTAGE=0.03)) + mock_settings(yaml_mock, dict(TOKEN_DEPOSIT_PERCENTAGE=0.03)) with pytest.raises(ValidationError) as e: - HathorSettings.from_yaml(filepath='some_path') + load_yaml_settings(HathorSettings, filepath='some_path') assert 'TOKEN_DEPOSIT_PERCENTAGE must result in an integer FEE_DIVISOR' in str(e.value) assert 'TOKEN_DEPOSIT_PERCENTAGE=0.03' in str(e.value) # Test fails when TOKEN_DEPOSIT_PERCENTAGE results in non-integer FEE_DIVISOR (0.07 -> 14.285...) 
- mock_settings(dict(TOKEN_DEPOSIT_PERCENTAGE=0.07)) + mock_settings(yaml_mock, dict(TOKEN_DEPOSIT_PERCENTAGE=0.07)) with pytest.raises(ValidationError) as e: - HathorSettings.from_yaml(filepath='some_path') + load_yaml_settings(HathorSettings, filepath='some_path') assert 'TOKEN_DEPOSIT_PERCENTAGE must result in an integer FEE_DIVISOR' in str(e.value) assert 'TOKEN_DEPOSIT_PERCENTAGE=0.07' in str(e.value) def test_consensus_algorithm() -> None: - yaml_mock = Mock() required_settings = dict(P2PKH_VERSION_BYTE='x01', MULTISIG_VERSION_BYTE='x02', NETWORK_NAME='test') + yaml_mock = Mock(return_value=required_settings) - def mock_settings(settings_: dict[str, Any]) -> None: - yaml_mock.dict_from_extended_yaml = Mock(return_value=required_settings | settings_) + def mock_settings(mock: Mock, settings_: dict[str, Any]) -> None: + mock.return_value = required_settings | settings_ + # mock = Mock(return_value=required_settings | settings_) - with patch('hathor.conf.settings.yaml', yaml_mock): + with patch('hathorlib.utils.yaml.dict_from_extended_yaml', yaml_mock): # Test passes when PoA is disabled with default settings - mock_settings(dict()) - HathorSettings.from_yaml(filepath='some_path') + mock_settings(yaml_mock, dict()) + load_yaml_settings(HathorSettings, filepath='some_path') # Test fails when PoA is enabled with default settings - mock_settings(dict( + mock_settings(yaml_mock, dict( CONSENSUS_ALGORITHM=dict( type='PROOF_OF_AUTHORITY', signers=(dict(public_key=b'some_signer'),) ) )) with pytest.raises(ValidationError) as e: - HathorSettings.from_yaml(filepath='some_path') + load_yaml_settings(HathorSettings, filepath='some_path') assert 'PoA networks do not support block rewards' in str(e.value) # Test passes when PoA is enabled without block rewards - mock_settings(dict( + mock_settings(yaml_mock, dict( BLOCKS_PER_HALVING=None, INITIAL_TOKEN_UNITS_PER_BLOCK=0, MINIMUM_TOKEN_UNITS_PER_BLOCK=0, CONSENSUS_ALGORITHM=dict(type='PROOF_OF_AUTHORITY', 
signers=(dict(public_key=b'some_signer'),)), )) - HathorSettings.from_yaml(filepath='some_path') + load_yaml_settings(HathorSettings, filepath='some_path') # Test fails when no signer is provided - mock_settings(dict( + mock_settings(yaml_mock, dict( BLOCKS_PER_HALVING=None, INITIAL_TOKEN_UNITS_PER_BLOCK=0, MINIMUM_TOKEN_UNITS_PER_BLOCK=0, CONSENSUS_ALGORITHM=dict(type='PROOF_OF_AUTHORITY', signers=()), )) with pytest.raises(ValidationError) as e: - HathorSettings.from_yaml(filepath='some_path') + load_yaml_settings(HathorSettings, filepath='some_path') assert 'At least one signer must be provided in PoA networks' in str(e.value) # Test fails when PoA is enabled with BLOCKS_PER_HALVING - mock_settings(dict( + mock_settings(yaml_mock, dict( BLOCKS_PER_HALVING=123, INITIAL_TOKEN_UNITS_PER_BLOCK=0, MINIMUM_TOKEN_UNITS_PER_BLOCK=0, CONSENSUS_ALGORITHM=dict(type='PROOF_OF_AUTHORITY', signers=(dict(public_key=b'some_signer'),)), )) with pytest.raises(ValidationError) as e: - HathorSettings.from_yaml(filepath='some_path') + load_yaml_settings(HathorSettings, filepath='some_path') assert 'PoA networks do not support block rewards' in str(e.value) # Test fails when PoA is enabled with INITIAL_TOKEN_UNITS_PER_BLOCK - mock_settings(dict( + mock_settings(yaml_mock, dict( BLOCKS_PER_HALVING=None, INITIAL_TOKEN_UNITS_PER_BLOCK=123, MINIMUM_TOKEN_UNITS_PER_BLOCK=0, CONSENSUS_ALGORITHM=dict(type='PROOF_OF_AUTHORITY', signers=(dict(public_key=b'some_signer'),)), )) with pytest.raises(ValidationError) as e: - HathorSettings.from_yaml(filepath='some_path') + load_yaml_settings(HathorSettings, filepath='some_path') assert 'PoA networks do not support block rewards' in str(e.value) # Test fails when PoA is enabled with MINIMUM_TOKEN_UNITS_PER_BLOCK - mock_settings(dict( + mock_settings(yaml_mock, dict( BLOCKS_PER_HALVING=None, INITIAL_TOKEN_UNITS_PER_BLOCK=0, MINIMUM_TOKEN_UNITS_PER_BLOCK=123, CONSENSUS_ALGORITHM=dict(type='PROOF_OF_AUTHORITY', 
signers=(dict(public_key=b'some_signer'),)), )) with pytest.raises(ValidationError) as e: - HathorSettings.from_yaml(filepath='some_path') + load_yaml_settings(HathorSettings, filepath='some_path') assert 'PoA networks do not support block rewards' in str(e.value) @@ -261,4 +262,4 @@ def mock_settings(settings_: dict[str, Any]) -> None: def test_mainnet_settings_migration(): - assert MAINNET_SETTINGS == HathorSettings.from_yaml(filepath=MAINNET_SETTINGS_FILEPATH) + assert MAINNET_SETTINGS == load_yaml_settings(HathorSettings, filepath=MAINNET_SETTINGS_FILEPATH) diff --git a/hathor_tests/p2p/test_whitelist.py b/hathor_tests/p2p/test_whitelist.py index b7619025c..45e79639b 100644 --- a/hathor_tests/p2p/test_whitelist.py +++ b/hathor_tests/p2p/test_whitelist.py @@ -5,7 +5,6 @@ from twisted.web.client import Agent from hathor.conf.get_settings import get_global_settings -from hathor.conf.settings import HathorSettings from hathor.manager import HathorManager from hathor.p2p.manager import WHITELIST_REQUEST_TIMEOUT from hathor.p2p.sync_version import SyncVersion @@ -16,7 +15,7 @@ class WhitelistTestCase(unittest.TestCase): def test_whitelist_no_no(self) -> None: network = 'testnet' - self._settings = get_global_settings()._replace(ENABLE_PEER_WHITELIST=True) + self._settings = get_global_settings().model_copy(update={'ENABLE_PEER_WHITELIST': True}) manager1 = self.create_peer(network) self.assertEqual(manager1.connections.get_enabled_sync_versions(), {SyncVersion.V2}) @@ -38,7 +37,7 @@ def test_whitelist_no_no(self) -> None: def test_whitelist_yes_no(self) -> None: network = 'testnet' - self._settings = get_global_settings()._replace(ENABLE_PEER_WHITELIST=True) + self._settings = get_global_settings().model_copy(update={'ENABLE_PEER_WHITELIST': True}) manager1 = self.create_peer(network) self.assertEqual(manager1.connections.get_enabled_sync_versions(), {SyncVersion.V2}) @@ -62,7 +61,7 @@ def test_whitelist_yes_no(self) -> None: def test_whitelist_yes_yes(self) -> None: 
network = 'testnet' - self._settings = get_global_settings()._replace(ENABLE_PEER_WHITELIST=True) + self._settings = get_global_settings().model_copy(update={'ENABLE_PEER_WHITELIST': True}) manager1 = self.create_peer(network) self.assertEqual(manager1.connections.get_enabled_sync_versions(), {SyncVersion.V2}) @@ -90,7 +89,7 @@ def test_update_whitelist(self) -> None: manager: HathorManager = self.create_peer(network) connections_manager = manager.connections - settings_mock = Mock(spec_set=HathorSettings) + settings_mock = Mock() settings_mock.WHITELIST_URL = 'some_url' connections_manager._settings = settings_mock diff --git a/hathor_tests/poa/test_poa.py b/hathor_tests/poa/test_poa.py index 65ad84e2b..13d9ec526 100644 --- a/hathor_tests/poa/test_poa.py +++ b/hathor_tests/poa/test_poa.py @@ -18,7 +18,6 @@ from cryptography.hazmat.primitives.asymmetric import ec from pydantic import ValidationError -from hathor.conf.settings import HathorSettings from hathor.consensus import poa from hathor.consensus.consensus_settings import PoaSettings, PoaSignerSettings from hathor.consensus.poa.poa_signer import PoaSigner, PoaSignerFile @@ -94,7 +93,7 @@ def get_signer() -> tuple[PoaSigner, bytes]: return file.get_signer(), public_key_bytes poa_signer, public_key_bytes = get_signer() - settings = Mock(spec_set=HathorSettings) + settings = Mock() settings.CONSENSUS_ALGORITHM = PoaSettings.model_construct(signers=()) settings.AVG_TIME_BETWEEN_BLOCKS = 30 block_verifier = PoaBlockVerifier(settings=settings) diff --git a/hathor_tests/poa/test_poa_verification.py b/hathor_tests/poa/test_poa_verification.py index d931ef8a7..b060c39f6 100644 --- a/hathor_tests/poa/test_poa_verification.py +++ b/hathor_tests/poa/test_poa_verification.py @@ -35,14 +35,16 @@ def setUp(self) -> None: public_key = self.signer.get_public_key() public_key_bytes = get_public_key_bytes_compressed(public_key) - settings = self._settings._replace( - BLOCKS_PER_HALVING=None, - INITIAL_TOKEN_UNITS_PER_BLOCK=0, - 
MINIMUM_TOKEN_UNITS_PER_BLOCK=0, - CONSENSUS_ALGORITHM=PoaSettings( - type=ConsensusType.PROOF_OF_AUTHORITY, - signers=(PoaSignerSettings(public_key=public_key_bytes),), - ), + settings = self._settings.model_copy( + update={ + 'BLOCKS_PER_HALVING': None, + 'INITIAL_TOKEN_UNITS_PER_BLOCK': 0, + 'MINIMUM_TOKEN_UNITS_PER_BLOCK': 0, + 'CONSENSUS_ALGORITHM': PoaSettings( + type=ConsensusType.PROOF_OF_AUTHORITY, + signers=(PoaSignerSettings(public_key=public_key_bytes),), + ) + }, ) builder = self.get_builder().set_settings(settings) diff --git a/hathor_tests/poa/utils.py b/hathor_tests/poa/utils.py index 096045c4f..033a4ca04 100644 --- a/hathor_tests/poa/utils.py +++ b/hathor_tests/poa/utils.py @@ -40,14 +40,14 @@ def get_settings( signers.append(poa_settings) settings = get_global_settings() - settings = settings._replace( - AVG_TIME_BETWEEN_BLOCKS=time_between_blocks or settings.AVG_TIME_BETWEEN_BLOCKS, - BLOCKS_PER_HALVING=None, - INITIAL_TOKEN_UNITS_PER_BLOCK=0, - MINIMUM_TOKEN_UNITS_PER_BLOCK=0, - CONSENSUS_ALGORITHM=PoaSettings( + settings = settings.model_copy(update={ + 'AVG_TIME_BETWEEN_BLOCKS': time_between_blocks or settings.AVG_TIME_BETWEEN_BLOCKS, + 'BLOCKS_PER_HALVING': None, + 'INITIAL_TOKEN_UNITS_PER_BLOCK': 0, + 'MINIMUM_TOKEN_UNITS_PER_BLOCK': 0, + 'CONSENSUS_ALGORITHM': PoaSettings( type=ConsensusType.PROOF_OF_AUTHORITY, signers=tuple(signers), ), - ) + }) return settings diff --git a/hathor_tests/resources/feature/test_feature.py b/hathor_tests/resources/feature/test_feature.py index 2752e8e78..c78c60ec7 100644 --- a/hathor_tests/resources/feature/test_feature.py +++ b/hathor_tests/resources/feature/test_feature.py @@ -69,15 +69,17 @@ def get_state(*, block: Block, feature: Feature) -> FeatureState: Feature.NOP_FEATURE_2: FeatureInfo(state=FeatureState.LOCKED_IN, criteria=nop_feature_2_criteria), }) - settings = get_global_settings()._replace( - FEATURE_ACTIVATION=FeatureSettings( - evaluation_interval=4, - default_threshold=3, - features={ - 
Feature.NOP_FEATURE_1: nop_feature_1_criteria, - Feature.NOP_FEATURE_2: nop_feature_2_criteria - } - ) + settings = get_global_settings().model_copy( + update={ + 'FEATURE_ACTIVATION': FeatureSettings( + evaluation_interval=4, + default_threshold=3, + features={ + Feature.NOP_FEATURE_1: nop_feature_1_criteria, + Feature.NOP_FEATURE_2: nop_feature_2_criteria + } + ) + } ) feature_resource = FeatureResource( diff --git a/hathor_tests/tx/test_block.py b/hathor_tests/tx/test_block.py index 2462466ee..9c68c59c9 100644 --- a/hathor_tests/tx/test_block.py +++ b/hathor_tests/tx/test_block.py @@ -17,7 +17,6 @@ import pytest from hathor.conf.get_settings import get_global_settings -from hathor.conf.settings import HathorSettings from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import BlockIsMissingSignal, BlockIsSignaling, FeatureService from hathor.transaction import Block @@ -139,7 +138,7 @@ def test_get_feature_activation_bit_value() -> None: def test_verify_must_signal() -> None: - settings = Mock(spec_set=HathorSettings) + settings = Mock() settings.CHECKPOINTS = [] feature_service = Mock(spec_set=FeatureService) feature_service.is_signaling_mandatory_features = Mock( @@ -155,7 +154,7 @@ def test_verify_must_signal() -> None: def test_verify_must_not_signal() -> None: - settings = Mock(spec_set=HathorSettings) + settings = Mock() settings.CHECKPOINTS = [] feature_service = Mock(spec_set=FeatureService) feature_service.is_signaling_mandatory_features = Mock(return_value=BlockIsSignaling()) diff --git a/hathor_tests/tx/test_fee_tokens.py b/hathor_tests/tx/test_fee_tokens.py index a58972201..50a9e2f7a 100644 --- a/hathor_tests/tx/test_fee_tokens.py +++ b/hathor_tests/tx/test_fee_tokens.py @@ -14,7 +14,6 @@ import pytest -from hathor.conf.settings import FeatureSetting from hathor.crypto.util import decode_address from hathor.exception import InvalidNewTransaction from hathor.indexes.tokens_index import TokenUtxoInfo @@ 
-29,6 +28,7 @@ from hathor_tests import unittest from hathor_tests.dag_builder.builder import TestDAGBuilder from hathor_tests.utils import add_blocks_unlock_reward, create_fee_tokens, create_tokens, get_genesis_key +from hathorlib.conf.settings import FeatureSetting class FeeTokenTest(unittest.TestCase): @@ -634,7 +634,7 @@ def test_fee_token_activation(self) -> None: 'testnet', unlock_wallet=True, wallet_index=True, - settings=self._settings._replace(ENABLE_FEE_BASED_TOKENS=FeatureSetting.DISABLED), + settings=self._settings.model_copy(update={'ENABLE_FEE_BASED_TOKENS': FeatureSetting.DISABLED}), ) with pytest.raises(InvalidNewTransaction) as e: create_fee_tokens(custom_manager, self.address_b58) diff --git a/hathor_tests/tx/test_mempool_tips_index.py b/hathor_tests/tx/test_mempool_tips_index.py index 576e708cf..adee3ad95 100644 --- a/hathor_tests/tx/test_mempool_tips_index.py +++ b/hathor_tests/tx/test_mempool_tips_index.py @@ -24,7 +24,8 @@ class TestMempoolTipsIndex(unittest.TestCase): def setUp(self) -> None: super().setUp() - settings = self._settings._replace(REWARD_SPEND_MIN_BLOCKS=1) # for simplicity + assert self._settings is not None + settings = self._settings.model_copy(update={'REWARD_SPEND_MIN_BLOCKS': 1}) # for simplicity daa = DifficultyAdjustmentAlgorithm(settings=settings, test_mode=TestMode.TEST_ALL_WEIGHT) builder = self.get_builder(settings).set_daa(daa) diff --git a/hathor_tests/tx/test_verification_mempool.py b/hathor_tests/tx/test_verification_mempool.py index f7019dd20..134164d0d 100644 --- a/hathor_tests/tx/test_verification_mempool.py +++ b/hathor_tests/tx/test_verification_mempool.py @@ -362,7 +362,8 @@ def test_checkpoints(self) -> Generator: blk = artifacts.get_typed_vertex(f'b{height}', Block) checkpoints.append(Checkpoint(height=height, hash=blk.hash)) - new_settings = self._settings._replace(CHECKPOINTS=checkpoints) + assert self._settings is not None + new_settings = self._settings.model_copy(update={'CHECKPOINTS': checkpoints}) 
manager2 = self.create_peer('unittests', settings=new_settings) assert [(cp.height, cp.hash) for cp in manager2.checkpoints] == [(cp.height, cp.hash) for cp in checkpoints] artifacts.propagate_with(manager2, up_to='b30') diff --git a/hathor_tests/unittest.py b/hathor_tests/unittest.py index cf4138a50..bbcde3d48 100644 --- a/hathor_tests/unittest.py +++ b/hathor_tests/unittest.py @@ -210,7 +210,7 @@ def create_peer( # type: ignore[no-untyped-def] settings: HathorSettings | None = None, ): # TODO: Add -> HathorManager here. It breaks the lint in a lot of places. - settings = (settings or self._settings)._replace(NETWORK_NAME=network) + settings = (settings or self._settings).model_copy(update={'NETWORK_NAME': network}) builder = self.get_builder() \ .set_settings(settings) diff --git a/hathor_tests/utils_modules/test_named_tuple.py b/hathor_tests/utils_modules/test_named_tuple.py deleted file mode 100644 index c05d46b95..000000000 --- a/hathor_tests/utils_modules/test_named_tuple.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2023 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import NamedTuple - -import pydantic -import pytest -from pydantic import ValidationError - -from hathor.utils.named_tuple import validated_named_tuple_from_dict -from hathor.utils.pydantic import BaseModel - - -class InnerTuple(NamedTuple): - x: str - - -class InnerModel(BaseModel): - y: str - - -class OuterTuple(NamedTuple): - a: int - b: InnerTuple - c: InnerModel - - -def _validate_a(a: int) -> int: - """Validator for 'a' field - must not be greater than 10.""" - if a > 10: - raise ValueError('"a" cannot be greater than 10') - return a - - -VALIDATORS = dict( - validate_a=pydantic.field_validator('a', mode='before')(_validate_a) -) - - -@pytest.mark.parametrize( - ['attributes', 'expected'], - [ - ( - dict(a=0, b=('b',), c=dict(y='c')), - OuterTuple(a=0, b=InnerTuple(x='b'), c=InnerModel(y='c')) - ), - ( - dict(a=5, b=('bb',), c=dict(y='cc')), - OuterTuple(a=5, b=InnerTuple(x='bb'), c=InnerModel(y='cc')) - ), - ( - dict(a=10, b=('bbb',), c=dict(y='ccc')), - OuterTuple(a=10, b=InnerTuple(x='bbb'), c=InnerModel(y='ccc')) - ), - ] -) -def test_validated_named_tuple_from_dict(attributes, expected): - result = validated_named_tuple_from_dict(OuterTuple, attributes, validators=VALIDATORS) - - assert isinstance(result.b, InnerTuple) - assert isinstance(result.c, InnerModel) - assert result == expected - - -@pytest.mark.parametrize( - 'attributes', - [ - dict(a=11, b=('b',), c=dict(y='c')), - dict(a=50, b=('bb',), c=dict(y='cc')), - dict(a=100, b=('bbb',), c=dict(y='ccc')), - ] -) -def test_validated_named_tuple_from_dict_error(attributes): - with pytest.raises(ValidationError) as e: - validated_named_tuple_from_dict(OuterTuple, attributes, validators=VALIDATORS) - - errors = e.value.errors() - assert errors[0]['msg'] == 'Value error, "a" cannot be greater than 10' diff --git a/hathor/p2p/ca.crt b/hathorlib/hathorlib/cert/ca.crt similarity index 100% rename from hathor/p2p/ca.crt rename to hathorlib/hathorlib/cert/ca.crt diff --git a/hathor/p2p/ca.key 
b/hathorlib/hathorlib/cert/ca.key similarity index 100% rename from hathor/p2p/ca.key rename to hathorlib/hathorlib/cert/ca.key diff --git a/hathorlib/hathorlib/conf/__init__.py b/hathorlib/hathorlib/conf/__init__.py index 35b6d7f33..92b78695e 100644 --- a/hathorlib/hathorlib/conf/__init__.py +++ b/hathorlib/hathorlib/conf/__init__.py @@ -1,5 +1,20 @@ +from pathlib import Path + from hathorlib.conf.get_settings import HathorSettings +parent_dir = Path(__file__).parent + +MAINNET_SETTINGS_FILEPATH = str(parent_dir / 'mainnet.yml') +TESTNET_INDIA_SETTINGS_FILEPATH = str(parent_dir / 'testnet.yml') +NANO_TESTNET_SETTINGS_FILEPATH = str(parent_dir / 'nano_testnet.yml') +LOCALNET_SETTINGS_FILEPATH = str(parent_dir / 'localnet.yml') +UNITTESTS_SETTINGS_FILEPATH = str(parent_dir / 'unittests.yml') + __all__ = [ + 'MAINNET_SETTINGS_FILEPATH', + 'TESTNET_INDIA_SETTINGS_FILEPATH', + 'NANO_TESTNET_SETTINGS_FILEPATH', + 'LOCALNET_SETTINGS_FILEPATH', + 'UNITTESTS_SETTINGS_FILEPATH', 'HathorSettings', ] diff --git a/hathorlib/hathorlib/conf/get_settings.py b/hathorlib/hathorlib/conf/get_settings.py index e8de30930..6c65f491a 100644 --- a/hathorlib/hathorlib/conf/get_settings.py +++ b/hathorlib/hathorlib/conf/get_settings.py @@ -1,26 +1,53 @@ -import importlib import os +from typing import NamedTuple, Optional from hathorlib.conf.settings import HathorSettings as Settings +from hathorlib.conf.utils import load_module_settings, load_yaml_settings _config_file = None +class _SettingsMetadata(NamedTuple): + source: str + is_yaml: bool + settings: Settings + + +_settings_singleton: Optional[_SettingsMetadata] = None + + def HathorSettings() -> Settings: """ Return configuration file namedtuple Get the file from environment variable 'TXMINING_CONFIG_FILE' If not set we return the config file of the mainnet """ - global _config_file - # Import config file for network - default_file = 'hathorlib.conf.mainnet' - config_file = os.environ.get('TXMINING_CONFIG_FILE', default_file) - if 
_config_file is None: - _config_file = config_file - elif _config_file != config_file: - raise Exception('loading config twice with a different file') - try: - module = importlib.import_module(config_file) - except ModuleNotFoundError: - module = importlib.import_module(default_file) - return module.SETTINGS # type: ignore + settings_module_filepath = os.environ.get('HATHOR_CONFIG_FILE') + if settings_module_filepath is not None: + return _load_settings_singleton(settings_module_filepath, is_yaml=False) + + settings_yaml_filepath = os.environ.get('HATHOR_CONFIG_YAML') + if settings_yaml_filepath is not None: + return _load_settings_singleton(settings_yaml_filepath, is_yaml=True) + + return _load_settings_singleton('hathorlib.conf.mainnet', is_yaml=False) + + +def _load_settings_singleton(source: str, *, is_yaml: bool) -> Settings: + global _settings_singleton + + if _settings_singleton is not None: + if _settings_singleton.is_yaml != is_yaml: + raise Exception('loading config twice with a different file type') + if _settings_singleton.source != source: + raise Exception('loading config twice with a different file') + + return _settings_singleton.settings + + settings_loader = load_yaml_settings if is_yaml else load_module_settings + _settings_singleton = _SettingsMetadata( + source=source, + is_yaml=is_yaml, + settings=settings_loader(Settings, source) + ) + + return _settings_singleton.settings diff --git a/hathor/conf/localnet.yml b/hathorlib/hathorlib/conf/localnet.yml similarity index 100% rename from hathor/conf/localnet.yml rename to hathorlib/hathorlib/conf/localnet.yml diff --git a/hathor/conf/mainnet.yml b/hathorlib/hathorlib/conf/mainnet.yml similarity index 100% rename from hathor/conf/mainnet.yml rename to hathorlib/hathorlib/conf/mainnet.yml diff --git a/hathor/conf/nano_testnet.yml b/hathorlib/hathorlib/conf/nano_testnet.yml similarity index 100% rename from hathor/conf/nano_testnet.yml rename to hathorlib/hathorlib/conf/nano_testnet.yml diff --git 
a/hathorlib/hathorlib/conf/settings.py b/hathorlib/hathorlib/conf/settings.py index 8aff63386..cb8829247 100644 --- a/hathorlib/hathorlib/conf/settings.py +++ b/hathorlib/hathorlib/conf/settings.py @@ -5,18 +5,53 @@ LICENSE file in the root directory of this source tree. """ -from typing import NamedTuple +import os +from enum import StrEnum, auto, unique +from math import log +from typing import Annotated, Optional +from pydantic import BaseModel, BeforeValidator, ConfigDict, computed_field, field_validator, model_validator +from typing_extensions import Self + +from hathorlib.conf.utils import parse_hex_str + + +@unique +class FeatureSetting(StrEnum): + """Enum to configure the state of a feature.""" + + # Completely disabled. + DISABLED = auto() + + # Completely enabled since network creation. + ENABLED = auto() + + # Enabled through Feature Activation. + FEATURE_ACTIVATION = auto() + + def __bool__(self) -> bool: + """ + >>> bool(FeatureSetting.DISABLED) + False + >>> bool(FeatureSetting.ENABLED) + True + >>> bool(FeatureSetting.FEATURE_ACTIVATION) + True + """ + return self in (FeatureSetting.ENABLED, FeatureSetting.FEATURE_ACTIVATION) + + +class HathorSettings(BaseModel): + model_config = ConfigDict(extra='ignore') -class HathorSettings(NamedTuple): # Name of the network: "mainnet", "testnet-alpha", "testnet-bravo", ... NETWORK_NAME: str # Version byte of the address in P2PKH - P2PKH_VERSION_BYTE: bytes + P2PKH_VERSION_BYTE: Annotated[bytes, BeforeValidator(parse_hex_str)] # Version byte of the address in MultiSig - MULTISIG_VERSION_BYTE: bytes + MULTISIG_VERSION_BYTE: Annotated[bytes, BeforeValidator(parse_hex_str)] # HTR Token UID HATHOR_TOKEN_UID: bytes = b'\x00' @@ -33,6 +68,10 @@ class HathorSettings(NamedTuple): # Symbol of the Hathor token HATHOR_TOKEN_SYMBOL: str = 'HTR' + # The name and symbol of the native token. This is only used in APIs to serve clients. 
+ NATIVE_TOKEN_NAME: str = 'Hathor' + NATIVE_TOKEN_SYMBOL: str = 'HTR' + + # Number of decimal places for the Hathor token DECIMAL_PLACES: int = 2 @@ -56,3 +95,459 @@ class HathorSettings(NamedTuple): # Max length in bytes allowed for on-chain blueprint code inside the transaction, 24KB (not KiB) NC_ON_CHAIN_BLUEPRINT_CODE_MAX_SIZE_COMPRESSED: int = 24_000 + + # Initial bootstrap servers + BOOTSTRAP_DNS: list[str] = [] + + # enable peer whitelist + ENABLE_PEER_WHITELIST: bool = False + + # whether to use the whitelist with sync-v2 peers, does not affect whether the whitelist is enabled or not, it will + # always be enabled for sync-v1 if it is enabled + USE_PEER_WHITELIST_ON_SYNC_V2: bool = True + + # Genesis pre-mined tokens + GENESIS_TOKEN_UNITS: int = 1 * (10 ** 9) # 1B + + GENESIS_TOKENS: int = 1 * (10 ** 9) * (10 ** 2) # 100B = GENESIS_TOKEN_UNITS * (10 ** DECIMAL_PLACES) + + # Fee rate settings + FEE_PER_OUTPUT: int = 1 + + @computed_field # type: ignore[prop-decorator] + @property + def FEE_DIVISOR(self) -> int: + """Divisor used for evaluating fee amounts""" + result = 1 / self.TOKEN_DEPOSIT_PERCENTAGE + assert result.is_integer() + return int(result) + + # To disable reward halving, just set this to `None` and make sure that INITIAL_TOKEN_UNITS_PER_BLOCK is equal to + # MINIMUM_TOKEN_UNITS_PER_BLOCK. + BLOCKS_PER_HALVING: Optional[int] = 2 * 60 * 24 * 365 # 1051200, every 365 days + + INITIAL_TOKEN_UNITS_PER_BLOCK: int = 64 + MINIMUM_TOKEN_UNITS_PER_BLOCK: int = 8 + + @computed_field # type: ignore[prop-decorator] + @property + def INITIAL_TOKENS_PER_BLOCK(self) -> int: + return int(self.INITIAL_TOKEN_UNITS_PER_BLOCK * (10 ** self.DECIMAL_PLACES)) + + @computed_field # type: ignore[prop-decorator] + @property + def MINIMUM_TOKENS_PER_BLOCK(self) -> int: + return int(self.MINIMUM_TOKEN_UNITS_PER_BLOCK * (10 ** self.DECIMAL_PLACES)) + + # Assume that: amount < minimum + # But, amount = initial / (2**n), where n = number_of_halvings.
Thus: + # initial / (2**n) < minimum + # initial / minimum < 2**n + # 2**n > initial / minimum + # Applying log to both sides: + # n > log2(initial / minimum) + # n > log2(initial) - log2(minimum) + @computed_field # type: ignore[prop-decorator] + @property + def MAXIMUM_NUMBER_OF_HALVINGS(self) -> int: + return int(log(self.INITIAL_TOKEN_UNITS_PER_BLOCK, 2) - log(self.MINIMUM_TOKEN_UNITS_PER_BLOCK, 2)) + + # Average time between blocks. + AVG_TIME_BETWEEN_BLOCKS: int = 30 # in seconds + + # Genesis pre-mined outputs + # P2PKH HMcJymyctyhnWsWTXqhP9txDwgNZaMWf42 + # + # To generate a new P2PKH script, run: + # >>> from hathor.transaction.scripts import P2PKH + # >>> import base58 + # >>> address = base58.b58decode('HMcJymyctyhnWsWTXqhP9txDwgNZaMWf42') + # >>> P2PKH.create_output_script(address=address).hex() + GENESIS_OUTPUT_SCRIPT: Annotated[bytes, BeforeValidator(parse_hex_str)] = ( + bytes.fromhex("76a914a584cf48b161e4a49223ed220df30037ab740e0088ac") + ) + + # Genesis timestamps, nonces and hashes + + # Timestamp used for the genesis block + GENESIS_BLOCK_TIMESTAMP: int = 1572636343 + + @computed_field # type: ignore[prop-decorator] + @property + def GENESIS_TX1_TIMESTAMP(self) -> int: + """Timestamp used for the first genesis transaction.""" + return self.GENESIS_BLOCK_TIMESTAMP + 1 + + @computed_field # type: ignore[prop-decorator] + @property + def GENESIS_TX2_TIMESTAMP(self) -> int: + """Timestamp used for the second genesis transaction.""" + return self.GENESIS_BLOCK_TIMESTAMP + 2 + + GENESIS_BLOCK_NONCE: int = 3526202 + GENESIS_BLOCK_HASH: Annotated[bytes, BeforeValidator(parse_hex_str)] = ( + bytes.fromhex( + "000007eb968a6cdf0499e2d033faf1e163e0dc9cf41876acad4d421836972038" + ) + ) + GENESIS_TX1_NONCE: int = 12595 + GENESIS_TX1_HASH: Annotated[bytes, BeforeValidator(parse_hex_str)] = bytes.fromhex( + "00025d75e44804a6a6a099f4320471c864b38d37b79b496ee26080a2a1fd5b7b" + ) + GENESIS_TX2_NONCE: int = 21301 + GENESIS_TX2_HASH: Annotated[bytes, 
BeforeValidator(parse_hex_str)] = bytes.fromhex( + "0002c187ab30d4f61c11a5dc43240bdf92dba4d19f40f1e883b0a5fdac54ef53" + ) + + # Weight of genesis and minimum weight of a tx/block + MIN_BLOCK_WEIGHT: int = 21 + MIN_SHARE_WEIGHT: int = 21 + + # Maximum distance between two consecutive blocks (in seconds), except for genesis. + # This prevent some DoS attacks exploiting the calculation of the score of a side chain. + # P(t > T) = exp(-MAX_DISTANCE_BETWEEN_BLOCKS / AVG_TIME_BETWEEN_BLOCKS) + # P(t > T) = exp(-35) = 6.3051e-16 + MAX_DISTANCE_BETWEEN_BLOCKS: int = 150 * 30 # AVG_TIME_BETWEEN_BLOCKS + + # Enable/disable weight decay. + WEIGHT_DECAY_ENABLED: bool = True + + # Minimum distance between two consecutive blocks that enables weight decay. + # Assuming that the hashrate is constant, the probability of activating is: + # P(t > T) = exp(-WEIGHT_DECAY_ACTIVATE_DISTANCE / AVG_TIME_BETWEEN_BLOCKS) + # P(t > T) = exp(-120) = 7.66e-53 + # But, if the hashrate drops 40 times, the expected time to find the next block + # becomes 40 * AVG_TIME_BETWEEN_BLOCKS = 20 minutes and the probability of + # activating the decay is exp(-3) = 0.05 = 5%. + WEIGHT_DECAY_ACTIVATE_DISTANCE: int = 120 * 30 # AVG_TIME_BETWEEN_BLOCKS + + # Window size of steps in which the weight is reduced when decaying is activated. + # The maximum number of steps is: + # max_steps = floor((MAX_DISTANCE_BETWEEN_BLOCKS - WEIGHT_DECAY_ACTIVATE_DISTANCE) / WEIGHT_DECAY_WINDOW_SIZE) + # Using these parameters, `max_steps = 15`. + WEIGHT_DECAY_WINDOW_SIZE: int = 60 + + # Amount to reduce the weight when decaying is activated. + # adj_weight = weight - decay + # difficulty = 2**adj_weight + # difficulty = 2**(weight - decay) + # difficulty = 2**weight / 2**decay + # As 2**(-2.73) = 0.15072, it reduces the mining difficulty for 15% of the original weight. + # Finally, the maximum decay is `max_steps * WEIGHT_DECAY_AMOUNT`. + # As `max_steps = 15`, then `max_decay = 2**(-15 * 2.73) = 4.71e-13`. 
+ WEIGHT_DECAY_AMOUNT: float = 2.73 + + # Number of blocks to be found with the same hash algorithm as `block`. + # The bigger it is, the smaller the variance of the hash rate estimator is. + BLOCK_DIFFICULTY_N_BLOCKS: int = 134 + + # Size limit in bytes for Block data field + BLOCK_DATA_MAX_SIZE: int = 100 + + # Number of subfolders in the storage folder (used in JSONStorage and CompactStorage) + STORAGE_SUBFOLDERS: int = 256 + + # Maximum level of the neighborhood graph generated by graphviz + MAX_GRAPH_LEVEL: int = 3 + + # Maximum difference between our latest timestamp and a peer's synced timestamp to consider + # that the peer is synced (in seconds). + P2P_SYNC_THRESHOLD: int = 60 + + # This multiplier will be used to decide whether the fullnode has had recent activity in the p2p sync. + # This info will be used in the readiness endpoint as one of the checks. + # + # We will multiply it by the AVG_TIME_BETWEEN_BLOCKS, and compare the result with the gap between the + # current time and the latest timestamp in the database. + # + # If the gap is bigger than the calculated threshold, then we will say the fullnode is not ready (unhealthy). + # + # Using (P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER * AVG_TIME_BETWEEN_BLOCKS) as threshold will have false + # positives. + # The probability of a false positive is exp(-N), assuming the hash rate is constant during the period. + # In other words, a false positive is likely to occur every exp(N) blocks. If the hash rate decreases + # quickly, this probability gets bigger. + # + # For instance, P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER=5 would get a false positive every 90 minutes. + # For P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER=10, we would have a false positive every 8 days. + # For P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER=15, we would have a false positive every 3 years. + + # On the other side, using higher numbers will lead to a higher delay for the fullnode to start reporting + # as not ready.
+ # + # So for use cases that may need more responsiveness on this readiness check, at the cost of some eventual false + # positive, it could be a good idea to decrease the value in this setting. + P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER: int = 15 + + # Whether to warn the other peer of the reason for closing the connection + WHITELIST_WARN_BLOCKED_PEERS: bool = False + + # Maximum number of opened threads that are solving POW for send tokens + MAX_POW_THREADS: int = 5 + + # The error tolerance, to allow small rounding errors in Python, when comparing weights, + # accumulated weights, and scores + # How to use: + # if abs(w1 - w2) < WEIGHT_TOL: + # print('w1 and w2 are equal') + + # if w1 < w2 - WEIGHT_TOL: + # print('w1 is smaller than w2') + + # if w1 <= w2 + WEIGHT_TOL: + # print('w1 is smaller than or equal to w2') + + # if w1 > w2 + WEIGHT_TOL: + # print('w1 is greater than w2') + + # if w1 >= w2 - WEIGHT_TOL: + # print('w1 is greater than or equal to w2') + WEIGHT_TOL: float = 1e-10 + + # Maximum difference between the weight and the min_weight.
+ MAX_TX_WEIGHT_DIFF: float = 4.0 + MAX_TX_WEIGHT_DIFF_ACTIVATION: float = 32.0 + + # Maximum number of txs or blocks (each, not combined) to show on the dashboard + MAX_DASHBOARD_COUNT: int = 15 + + # Maximum number of txs or blocks returned by the '/transaction' endpoint + MAX_TX_COUNT: int = 15 + + # URL prefix where API is served, for instance: /v1a/status + API_VERSION_PREFIX: str = 'v1a' + + # If should use stratum to resolve pow of transactions in send tokens resource + SEND_TOKENS_STRATUM: bool = True + + # Maximum number of subscribed addresses per websocket connection + WS_MAX_SUBS_ADDRS_CONN: Optional[int] = None + + # Maximum number of subscribed addresses that do not have any outputs (also per websocket connection) + WS_MAX_SUBS_ADDRS_EMPTY: Optional[int] = None + + # Whether miners are assumed to mine txs by default + STRATUM_MINE_TXS_DEFAULT: bool = True + + # Percentage used to calculate the number of HTR that must be deposited when minting new tokens + # The same percentage is used to calculate the number of HTR that must be withdraw when melting tokens + # See for further information, see [rfc 0011-token-deposit]. + TOKEN_DEPOSIT_PERCENTAGE: float = 0.01 + + @field_validator('TOKEN_DEPOSIT_PERCENTAGE', mode='after') + @classmethod + def _validate_token_deposit_percentage(cls, token_deposit_percentage: float) -> float: + """Validate that TOKEN_DEPOSIT_PERCENTAGE results in an integer FEE_DIVISOR.""" + result = 1 / token_deposit_percentage + if not result.is_integer(): + raise ValueError( + f'TOKEN_DEPOSIT_PERCENTAGE must result in an integer FEE_DIVISOR. 
' + f'Got TOKEN_DEPOSIT_PERCENTAGE={token_deposit_percentage}, FEE_DIVISOR={result}' + ) + return token_deposit_percentage + + # Array with the settings parameters that are used when calculating the settings hash + P2P_SETTINGS_HASH_FIELDS: list[str] = [ + 'P2PKH_VERSION_BYTE', + 'MULTISIG_VERSION_BYTE', + 'MIN_BLOCK_WEIGHT', + 'MIN_TX_WEIGHT', + 'BLOCK_DATA_MAX_SIZE' + ] + + # Maximum difference allowed between current time and a received tx timestamp (in seconds). Also used + # during peer connection. Peers shouldn't have their clocks more than MAX_FUTURE_TIMESTAMP_ALLOWED/2 apart + MAX_FUTURE_TIMESTAMP_ALLOWED: int = 5 * 60 + + # Multiplier for the value to increase the timestamp for the next retry moment to connect to the peer + PEER_CONNECTION_RETRY_INTERVAL_MULTIPLIER: int = 5 + + # Maximum retry interval for retrying to connect to the peer + PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL: int = 300 + + # Maximum number of connections in the p2p network + PEER_MAX_CONNECTIONS: int = 125 + + # Maximum period without receiving any messages from the peer (in seconds).
+ PEER_IDLE_TIMEOUT: int = 60 + + # Maximum number of entrypoints that we accept that a peer broadcasts + PEER_MAX_ENTRYPOINTS: int = 30 + + # Filepath of ca certificate file to generate connection certificates + CA_FILEPATH: str = os.path.join(os.path.dirname(__file__), '../cert/ca.crt') + + # Filepath of ca key file to sign connection certificates + CA_KEY_FILEPATH: str = os.path.join(os.path.dirname(__file__), '../cert/ca.key') + + # Timeout (in seconds) for the downloading deferred (in the downloader) when syncing two peers + GET_DATA_TIMEOUT: int = 90 + + # Number of retries for downloading a tx from a peer (in the downloader) + GET_DATA_RETRIES: int = 5 + + # After how many blocks can a reward be spent + REWARD_SPEND_MIN_BLOCKS: int = 300 + + # Maximum number of inputs accepted + MAX_NUM_INPUTS: int = 255 + + # Maximum number of outputs accepted + MAX_NUM_OUTPUTS: int = 255 + + # Maximum size of each txout's script (in bytes) + MAX_OUTPUT_SCRIPT_SIZE: int = 1024 + + # Maximum size of each txin's data (in bytes) + MAX_INPUT_DATA_SIZE: int = 1024 + + # Maximum number of pubkeys per OP_CHECKMULTISIG + MAX_MULTISIG_PUBKEYS: int = 20 + + # Maximum number of signatures per OP_CHECKMULTISIG + MAX_MULTISIG_SIGNATURES: int = 15 + + # Maximum number of sig operations of all inputs on a given tx + # including the redeemScript in case of MultiSig + MAX_TX_SIGOPS_INPUT: int = 255 * 5 + + # Maximum number of sig operations of all outputs on a given tx + MAX_TX_SIGOPS_OUTPUT: int = 255 * 5 + + # Maximum number of transactions returned on addresses history API + MAX_TX_ADDRESSES_HISTORY: int = 150 + + # Maximum number of elements (inputs and outputs) to be returned on address history API + # As a normal tx has ~2-4 inputs and 2 outputs, I would say the maximum should be 150*6 = 900 elements + MAX_INPUTS_OUTPUTS_ADDRESS_HISTORY: int = 6 * 150 # MAX_TX_ADDRESSES_HISTORY + + # Maximum number of TXs that will be sent by the Mempool API.
+ MEMPOOL_API_TX_LIMIT: int = 100 + + # Capabilities + CAPABILITY_WHITELIST: str = 'whitelist' + CAPABILITY_SYNC_VERSION: str = 'sync-version' + CAPABILITY_GET_BEST_BLOCKCHAIN: str = 'get-best-blockchain' + CAPABILITY_IPV6: str = 'ipv6' # peers announcing this capability will be relayed ipv6 entrypoints from other peers + CAPABILITY_NANO_STATE: str = 'nano-state' # indicates support for nano-state commands + + # Where to download whitelist from + WHITELIST_URL: Optional[str] = None + + # Interval (in seconds) to broadcast dashboard metrics to websocket connections + WS_SEND_METRICS_INTERVAL: int = 1 + + # Interval (in seconds) to write data to prometheus + PROMETHEUS_WRITE_INTERVAL: int = 15 + + # Interval (in seconds) to update GC data for prometheus + PROMETHEUS_UPDATE_GC_INTERVAL: int = 60 + + # Interval (in seconds) to collect metrics data + METRICS_COLLECT_DATA_INTERVAL: int = 5 + + # Interval (in seconds) to collect metrics data from rocksdb + METRICS_COLLECT_ROCKSDB_DATA_INTERVAL: int = 86400 # 1 day + + # Used on testing to enable slow asserts that help catch bugs but we don't want to run in production + SLOW_ASSERTS: bool = False + + # List of soft voided transaction. + SOFT_VOIDED_TX_IDS: list[Annotated[bytes, BeforeValidator(parse_hex_str)]] = [] + + # List of transactions to skip verification. + SKIP_VERIFICATION: list[Annotated[bytes, BeforeValidator(parse_hex_str)]] = [] + + # Identifier used in metadata's voided_by to mark a tx as soft-voided. + SOFT_VOIDED_ID: bytes = b'tx-non-grata' + + # Identifier used in metadata's voided_by when an unexpected exception occurs at consensus. + CONSENSUS_FAIL_ID: bytes = b'consensus-fail' + + # Identifier used in metadata's voided_by to mark a tx as partially validated. + PARTIALLY_VALIDATED_ID: bytes = b'pending-validation' + + # Maximum number of sync running simultaneously. + MAX_ENABLED_SYNC: int = 8 + + # Time to update the peers that are running sync. 
+ SYNC_UPDATE_INTERVAL: int = 10 * 60 # seconds + + # Interval to re-run peer discovery. + PEER_DISCOVERY_INTERVAL: int = 5 * 60 # seconds + + # Maximum number of GET_TIPS delayed calls per connection while running sync. + MAX_GET_TIPS_DELAYED_CALLS: int = 5 + + # Maximum number of blocks in the best blockchain list. + MAX_BEST_BLOCKCHAIN_BLOCKS: int = 20 + + # Default number of blocks in the best blockchain list. + DEFAULT_BEST_BLOCKCHAIN_BLOCKS: int = 10 + + # Time in seconds to request the best blockchain from peers. + BEST_BLOCKCHAIN_INTERVAL: int = 5 # seconds + + # Merged mining settings. The old value is going to be replaced by the new value through Feature Activation. + OLD_MAX_MERKLE_PATH_LENGTH: int = 12 + NEW_MAX_MERKLE_PATH_LENGTH: int = 20 + + # Maximum number of tx tips to accept in the initial phase of the mempool sync. 1000 is arbitrary, but it should be + # more than enough for the foreseeable future + MAX_MEMPOOL_RECEIVING_TIPS: int = 1000 + + # Max number of peers simultaneously stored in the node + MAX_VERIFIED_PEERS: int = 10_000 + + # Max number of peers simultaneously stored per-connection + MAX_UNVERIFIED_PEERS_PER_CONN: int = 100 + + # Used to enable nano contracts. + ENABLE_NANO_CONTRACTS: FeatureSetting = FeatureSetting.DISABLED + + # Used to enable fee-based tokens. + ENABLE_FEE_BASED_TOKENS: FeatureSetting = FeatureSetting.DISABLED + + # Used to enable opcodes V2. + ENABLE_OPCODES_V2: FeatureSetting = FeatureSetting.DISABLED + + # List of enabled blueprints.
+ BLUEPRINTS: dict[bytes, str] = {} + + @field_validator('BLUEPRINTS', mode='before') + @classmethod + def _parse_blueprints(cls, blueprints_raw: dict[str, str]) -> dict[bytes, str]: + """Parse dict[str, str] into dict[bytes, str].""" + blueprints: dict[bytes, str] = {} + for _id_str, _name in blueprints_raw.items(): + _id = bytes.fromhex(_id_str) + if _id in blueprints: + raise TypeError(f'Duplicate blueprint id: {_id_str}') + blueprints[_id] = _name + return blueprints + + # The pubkeys allowed to create on-chain-blueprints in the network + # XXX: in the future this restriction will be lifted, possibly through a feature activation + NC_ON_CHAIN_BLUEPRINT_RESTRICTED: bool = True + NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES: list[str] = [] + + # TODO: align this with a realistic value later + # fuel units are arbitrary but it's roughly the number of opcodes, memory_limit is in bytes + NC_INITIAL_FUEL_TO_LOAD_BLUEPRINT_MODULE: int = 100_000 # 100K opcodes + NC_MEMORY_LIMIT_TO_LOAD_BLUEPRINT_MODULE: int = 100 * 1024 * 1024 # 100MiB + NC_INITIAL_FUEL_TO_CALL_METHOD: int = 1_000_000 # 1M opcodes + NC_MEMORY_LIMIT_TO_CALL_METHOD: int = 1024 * 1024 * 1024 # 1GiB + + @model_validator(mode='after') + def _validate_genesis_tokens(self) -> Self: + """Validate genesis tokens.""" + genesis_tokens = self.GENESIS_TOKENS + genesis_token_units = self.GENESIS_TOKEN_UNITS + decimal_places = self.DECIMAL_PLACES + + if genesis_tokens != genesis_token_units * (10 ** decimal_places): + raise ValueError( + f'invalid tokens: GENESIS_TOKENS={genesis_tokens}, ' + f'GENESIS_TOKEN_UNITS={genesis_token_units}, DECIMAL_PLACES={decimal_places}' + ) + return self diff --git a/hathor/conf/testnet.yml b/hathorlib/hathorlib/conf/testnet.yml similarity index 100% rename from hathor/conf/testnet.yml rename to hathorlib/hathorlib/conf/testnet.yml diff --git a/hathor/conf/unittests.yml b/hathorlib/hathorlib/conf/unittests.yml similarity index 100% rename from hathor/conf/unittests.yml rename to 
hathorlib/hathorlib/conf/unittests.yml diff --git a/hathorlib/hathorlib/conf/utils.py b/hathorlib/hathorlib/conf/utils.py new file mode 100644 index 000000000..19861b92a --- /dev/null +++ b/hathorlib/hathorlib/conf/utils.py @@ -0,0 +1,40 @@ +""" +Copyright (c) Hathor Labs and its affiliates. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. +""" +import importlib +from pathlib import Path +from typing import TypeVar, Union + +from pydantic import BaseModel + +T = TypeVar('T', bound=BaseModel) + + +def parse_hex_str(hex_str: Union[str, bytes]) -> bytes: + """Parse a raw hex string into bytes.""" + if isinstance(hex_str, str): + return bytes.fromhex(hex_str.lstrip('x')) + + if not isinstance(hex_str, bytes): + raise ValueError(f'expected \'str\' or \'bytes\', got {hex_str}') + + return hex_str + + +def load_yaml_settings(model: type[T], filepath: str) -> T: + """ + Load a settings model (pydantic based) and a filepath to a yaml file and returns a validated instance. + YAML settings may use the `extends` key to merge definition with another existing file. 
+ """ + from hathorlib.utils.yaml import model_from_extended_yaml + return model_from_extended_yaml(model, filepath=filepath, custom_root=Path(__file__).parent) + + +def load_module_settings(model: type[T], module_path: str) -> T: + """Load module""" + settings_module = importlib.import_module(module_path) + settings = getattr(settings_module, 'SETTINGS') + return model.model_validate(settings) diff --git a/hathorlib/hathorlib/utils.py b/hathorlib/hathorlib/utils/__init__.py similarity index 100% rename from hathorlib/hathorlib/utils.py rename to hathorlib/hathorlib/utils/__init__.py diff --git a/hathor/utils/dict.py b/hathorlib/hathorlib/utils/dict.py similarity index 87% rename from hathor/utils/dict.py rename to hathorlib/hathorlib/utils/dict.py index b3c988150..dd9e299ab 100644 --- a/hathor/utils/dict.py +++ b/hathorlib/hathorlib/utils/dict.py @@ -13,9 +13,12 @@ # limitations under the License. from copy import deepcopy +from typing import Any, TypeVar +K = TypeVar('K') -def deep_merge(first_dict: dict, second_dict: dict) -> dict: + +def deep_merge(first_dict: dict[K, Any], second_dict: dict[K, Any]) -> dict[K, Any]: """ Recursively merges two dicts, returning a new one with the merged values. Keeps both input dicts intact. 
@@ -33,7 +36,7 @@ def deep_merge(first_dict: dict, second_dict: dict) -> dict: """ merged = deepcopy(first_dict) - def do_deep_merge(first: dict, second: dict) -> dict: + def do_deep_merge(first: dict[K, Any], second: dict[K, Any]) -> dict[K, Any]: for key in second: if key in first and isinstance(first[key], dict) and isinstance(second[key], dict): do_deep_merge(first[key], second[key]) diff --git a/hathor/utils/yaml.py b/hathorlib/hathorlib/utils/yaml.py similarity index 83% rename from hathor/utils/yaml.py rename to hathorlib/hathorlib/utils/yaml.py index 3c1794af8..ae3795474 100644 --- a/hathor/utils/yaml.py +++ b/hathorlib/hathorlib/utils/yaml.py @@ -11,17 +11,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - import os from pathlib import Path -from typing import Any, Optional, Union +from typing import Any, Optional, TypeVar, Union import yaml +from pydantic import BaseModel -from hathor.utils.dict import deep_merge +from hathorlib.utils.dict import deep_merge _EXTENDS_KEY = 'extends' +T = TypeVar('T', bound=BaseModel) + def dict_from_yaml(*, filepath: Union[Path, str]) -> dict[str, Any]: """Takes a filepath to a yaml file and returns a dictionary with its contents.""" @@ -70,3 +72,10 @@ def dict_from_extended_yaml(*, filepath: Union[Path, str], custom_root: Optional extended_dict = deep_merge(dict_to_extend, extension_dict) return extended_dict + + +def model_from_extended_yaml(model: type[T], *, filepath: str, custom_root: Optional[Path] = None) -> T: + """Takes a pydantic model and a filepath to a yaml file and returns a validated model instance.""" + settings_dict = dict_from_extended_yaml(filepath=filepath, custom_root=custom_root) + + return model.model_validate(settings_dict) diff --git a/hathorlib/poetry.lock b/hathorlib/poetry.lock index d3c9028f1..2f91c80e1 100644 --- a/hathorlib/poetry.lock +++ 
b/hathorlib/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand. [[package]] name = "aiohttp" @@ -6,6 +6,8 @@ version = "3.9.5" description = "Async http client/server framework (asyncio)" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"client\"" files = [ {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, @@ -94,7 +96,7 @@ multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aiosignal" @@ -102,6 +104,8 @@ version = "1.4.0" description = "aiosignal: a list of registered asynchronous callbacks" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"client\"" files = [ {file = "aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e"}, {file = "aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7"}, @@ -111,12 +115,26 @@ files = [ frozenlist = ">=1.1.0" typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""} +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = 
"sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + [[package]] name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"client\" and python_version < \"3.11\"" files = [ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, @@ -128,6 +146,8 @@ version = "25.4.0" description = "Classes Without Boilerplate" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"client\"" files = [ {file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"}, {file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"}, @@ -139,6 +159,7 @@ version = "2.1.1" description = "Base58 and Base58Check implementation." optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "base58-2.1.1-py3-none-any.whl", hash = "sha256:11a36f4d3ce51dfc1043f3218591ac4eb1ceb172919cebe05b52a5bcc8d245c2"}, {file = "base58-2.1.1.tar.gz", hash = "sha256:c5d0cb3f5b6e81e8e35da5754388ddcc6d0d14b6c6a132cb93d69ed580a7278c"}, @@ -153,6 +174,8 @@ version = "2.0.0" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, @@ -249,6 +272,7 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -260,6 +284,7 @@ version = "7.10.7" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a"}, {file = "coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5"}, @@ -371,7 +396,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" @@ -379,6 +404,7 @@ version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, @@ -433,6 +459,8 @@ version = "1.3.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, @@ -450,6 +478,7 @@ version = "7.0.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" +groups = ["dev"] files = [ {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, @@ -466,6 +495,8 @@ version = "1.8.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"client\"" files = [ {file = "frozenlist-1.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b37f6d31b3dcea7deb5e9696e529a6aa4a898adc33db82da12e4c60a7c4d2011"}, {file = "frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565"}, @@ -605,6 +636,8 @@ version = "3.11" description = "Internationalized Domain Names in Applications (IDNA)" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"client\"" files = [ {file = 
"idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, @@ -619,6 +652,7 @@ version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, @@ -630,6 +664,7 @@ version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -647,6 +682,8 @@ version = "0.7.5" description = "Mypyc runtime library" optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "implementation_name == \"cpython\" and platform_python_implementation != \"PyPy\"" files = [ {file = "librt-0.7.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:81056e01bba1394f1d92904ec61a4078f66df785316275edbaf51d90da8c6e26"}, {file = "librt-0.7.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d7c72c8756eeb3aefb1b9e3dac7c37a4a25db63640cac0ab6fc18e91a0edf05a"}, @@ -732,6 +769,7 @@ version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -743,6 +781,8 @@ version 
= "6.7.0" description = "multidict implementation" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"client\"" files = [ {file = "multidict-6.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9f474ad5acda359c8758c8accc22032c6abe6dc87a8be2440d097785e27a9349"}, {file = "multidict-6.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b7a9db5a870f780220e931d0002bbfd88fb53aceb6293251e2c839415c1b20e"}, @@ -901,6 +941,8 @@ version = "1.19.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "implementation_name == \"cpython\"" files = [ {file = "mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec"}, {file = "mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b"}, @@ -962,6 +1004,8 @@ version = "1.1.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "implementation_name == \"cpython\"" files = [ {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, @@ -973,6 +1017,7 @@ version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, @@ -984,6 +1029,8 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "implementation_name == \"cpython\"" files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -995,6 +1042,7 @@ version = "1.6.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, @@ -1010,6 +1058,8 @@ version = "0.4.1" description = "Accelerated property cache" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"client\"" files = [ {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db"}, {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8"}, @@ -1141,6 +1191,7 @@ version = "2.11.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, @@ -1152,6 +1203,7 @@ version = "0.92.20241201" description = "Utilities for Bitcoin and altcoin addresses and transaction manipulation." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pycoin-0.92.20241201.tar.gz", hash = "sha256:6e937be181573ccf02b35064844bec46de130386b45f3df196d3074a8c790512"}, ] @@ -1162,17 +1214,176 @@ version = "2.23" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\"" files = [ {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, ] +[[package]] +name = "pydantic" +version = "2.12.5" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"}, + {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.41.5" +typing-extensions = ">=4.14.1" +typing-inspection = ">=0.4.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, + {file = 
"pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, + {file = 
"pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", 
hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = 
"sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = 
"sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, + {file = 
"pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, + {file = 
"pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, + {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, +] + +[package.dependencies] +typing-extensions = ">=4.14.1" + [[package]] name = "pyflakes" version = "3.2.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, {file = "pyflakes-3.2.0.tar.gz", hash = 
"sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, @@ -1184,6 +1395,7 @@ version = "8.1.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-8.1.2-py3-none-any.whl", hash = "sha256:6c06dc309ff46a05721e6fd48e492a775ed8165d2ecdf57f156a80c7e95bb142"}, {file = "pytest-8.1.2.tar.gz", hash = "sha256:f3c45d1d5eed96b01a2aea70dee6a4a366d51d38f9957768083e4fecfc77f3ef"}, @@ -1206,6 +1418,7 @@ version = "5.0.0" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, @@ -1218,12 +1431,97 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] +[[package]] +name = "pyyaml" +version = "6.0.3" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = 
"sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, +] + [[package]] name = "structlog" version = "22.3.0" description = "Structured Logging for Python" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"client\"" files = [ {file = "structlog-22.3.0-py3-none-any.whl", hash = "sha256:b403f344f902b220648fa9f286a23c0cc5439a5844d271fec40562dbadbc70ad"}, {file = "structlog-22.3.0.tar.gz", hash = "sha256:e7509391f215e4afb88b1b80fa3ea074be57a5a17d794bd436a5c949da023333"}, @@ -1241,6 +1539,8 @@ version = "2.3.0" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_full_version <= \"3.11.0a6\"" files = [ {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, @@ -1286,16 +1586,45 @@ files = [ {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, ] +[[package]] +name = "types-pyyaml" +version = "6.0.12.9" +description = "Typing stubs for PyYAML" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = 
"types-PyYAML-6.0.12.9.tar.gz", hash = "sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6"}, + {file = "types_PyYAML-6.0.12.9-py3-none-any.whl", hash = "sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8"}, +] + [[package]] name = "typing-extensions" version = "4.15.0" description = "Backported and Experimental Type Hints for Python 3.9+" optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] +markers = {dev = "implementation_name == \"cpython\" or python_version < \"3.11\""} + +[[package]] +name = "typing-inspection" +version = "0.4.2" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" [[package]] name = "yarl" @@ -1303,6 +1632,8 @@ version = "1.22.0" description = "Yet another URL library" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"client\"" files = [ {file = "yarl-1.22.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c7bd6683587567e5a49ee6e336e0612bec8329be1b7d4c8af5687dcdeb67ee1e"}, {file = "yarl-1.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5cdac20da754f3a723cceea5b3448e1a2074866406adeb4ef35b469d089adb8f"}, @@ -1445,6 +1776,6 @@ propcache = ">=0.2.1" client = ["aiohttp", "structlog"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.9,<4" -content-hash 
= "c767f164bbb834257be64e95379045828434e7224a9b43b4e565f78a0f25c78f" +content-hash = "cb6fbc4ed90b7ea52a057875667cd77963ed45b6db27a8d51d9700d8029593ec" diff --git a/hathorlib/pyproject.toml b/hathorlib/pyproject.toml index e3c5f9af6..88d27d223 100644 --- a/hathorlib/pyproject.toml +++ b/hathorlib/pyproject.toml @@ -40,6 +40,8 @@ structlog = {version = "~22.3.0", optional = true} aiohttp = {version = "~3.9.3", optional = true} cryptography = "~42.0.5" pycoin = "~0.92" +pydantic = "^2.0" +pyyaml = "^6.0.1" [tool.poetry.dev-dependencies] isort = {version = "~5.13.2", extras = ["colors"]} @@ -47,6 +49,7 @@ mypy = {version = "^1.9.0", markers = "implementation_name == 'cpython'"} pytest = "~8.1.1" pytest-cov = "~5.0.0" flake8 = "~7.0.0" +types-pyyaml = "=6.0.12.9" [tool.poetry.extras] client = ["aiohttp", "structlog"] @@ -71,6 +74,9 @@ warn_unused_ignores = true namespace_packages = true show_error_codes = true show_error_context = true +plugins = [ + "pydantic.mypy", +] [tool.pytest.ini_options] minversion = "6.0" diff --git a/hathor_tests/utils_modules/__init__.py b/hathorlib/tests/utils_modules/__init__.py similarity index 100% rename from hathor_tests/utils_modules/__init__.py rename to hathorlib/tests/utils_modules/__init__.py diff --git a/hathor_tests/utils_modules/fixtures/empty.yml b/hathorlib/tests/utils_modules/fixtures/empty.yml similarity index 100% rename from hathor_tests/utils_modules/fixtures/empty.yml rename to hathorlib/tests/utils_modules/fixtures/empty.yml diff --git a/hathor_tests/utils_modules/fixtures/empty_extends.yml b/hathorlib/tests/utils_modules/fixtures/empty_extends.yml similarity index 100% rename from hathor_tests/utils_modules/fixtures/empty_extends.yml rename to hathorlib/tests/utils_modules/fixtures/empty_extends.yml diff --git a/hathor_tests/utils_modules/fixtures/invalid_extends.yml b/hathorlib/tests/utils_modules/fixtures/invalid_extends.yml similarity index 100% rename from hathor_tests/utils_modules/fixtures/invalid_extends.yml 
rename to hathorlib/tests/utils_modules/fixtures/invalid_extends.yml diff --git a/hathor_tests/utils_modules/fixtures/mainnet_extends.yml b/hathorlib/tests/utils_modules/fixtures/mainnet_extends.yml similarity index 100% rename from hathor_tests/utils_modules/fixtures/mainnet_extends.yml rename to hathorlib/tests/utils_modules/fixtures/mainnet_extends.yml diff --git a/hathor_tests/utils_modules/fixtures/number.yml b/hathorlib/tests/utils_modules/fixtures/number.yml similarity index 100% rename from hathor_tests/utils_modules/fixtures/number.yml rename to hathorlib/tests/utils_modules/fixtures/number.yml diff --git a/hathor_tests/utils_modules/fixtures/self_extends.yml b/hathorlib/tests/utils_modules/fixtures/self_extends.yml similarity index 100% rename from hathor_tests/utils_modules/fixtures/self_extends.yml rename to hathorlib/tests/utils_modules/fixtures/self_extends.yml diff --git a/hathor_tests/utils_modules/fixtures/valid.yml b/hathorlib/tests/utils_modules/fixtures/valid.yml similarity index 100% rename from hathor_tests/utils_modules/fixtures/valid.yml rename to hathorlib/tests/utils_modules/fixtures/valid.yml diff --git a/hathor_tests/utils_modules/fixtures/valid_extends.yml b/hathorlib/tests/utils_modules/fixtures/valid_extends.yml similarity index 100% rename from hathor_tests/utils_modules/fixtures/valid_extends.yml rename to hathorlib/tests/utils_modules/fixtures/valid_extends.yml diff --git a/hathor_tests/utils_modules/test_yaml.py b/hathorlib/tests/utils_modules/test_yaml.py similarity index 97% rename from hathor_tests/utils_modules/test_yaml.py rename to hathorlib/tests/utils_modules/test_yaml.py index be7a88e39..d2c118a99 100644 --- a/hathor_tests/utils_modules/test_yaml.py +++ b/hathorlib/tests/utils_modules/test_yaml.py @@ -19,7 +19,7 @@ import pytest from structlog import get_logger -from hathor.utils.yaml import dict_from_extended_yaml, dict_from_yaml +from hathorlib.utils.yaml import dict_from_extended_yaml, dict_from_yaml logger = 
get_logger() @@ -136,8 +136,7 @@ def test_dict_from_extended_yaml_valid_extends(): def test_dict_from_yaml_mainnet_extends(): - from hathor.conf import MAINNET_SETTINGS_FILEPATH - + from hathorlib.conf import MAINNET_SETTINGS_FILEPATH filepath = _get_absolute_filepath('fixtures/mainnet_extends.yml') mainnet_dict = dict_from_yaml(filepath=MAINNET_SETTINGS_FILEPATH) result = dict_from_extended_yaml(filepath=filepath, custom_root=Path(MAINNET_SETTINGS_FILEPATH).parent) diff --git a/poetry.lock b/poetry.lock index 0ab8e0cd1..447136741 100644 --- a/poetry.lock +++ b/poetry.lock @@ -759,12 +759,13 @@ optional = false python-versions = ">=3.9,<4" groups = ["main"] files = [] -develop = false +develop = true [package.dependencies] base58 = "~2.1.1" cryptography = "~42.0.5" pycoin = "~0.92" +pydantic = "^2.0" [package.extras] client = ["aiohttp (>=3.9.3,<3.10.0)", "structlog (>=22.3.0,<22.4.0)"] @@ -2826,4 +2827,4 @@ sentry = ["sentry-sdk", "structlog-sentry"] [metadata] lock-version = "2.1" python-versions = ">=3.11,<4" -content-hash = "fc5cc4688627fc05c28ce3491cbfd35fa34b46299ae1d8dc24c0a3edd742ad6d" +content-hash = "7c0832c1a10d81282c4994b7ea8488570484f3046fab0f3624708130dbc73220" diff --git a/pyproject.toml b/pyproject.toml index 785cd382a..bdae2de72 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ pyyaml = "^6.0.1" typing-extensions = "~4.12.2" python-healthchecklib = "^0.1.0" multidict = "=6.7.0" -hathorlib = {path = "hathorlib"} +hathorlib = {path = "hathorlib", develop = true} cffi = "=1.17.1" zope-interface = "=8.2" packaging = "=26.0"