From 065d1f351ea252aadab0a039cf324c4acd4d7737 Mon Sep 17 00:00:00 2001 From: Thykof Date: Thu, 13 Nov 2025 17:55:51 +0100 Subject: [PATCH 01/24] remove blacklist.validator_exceptions --- docs/miner_reference.md | 33 --------------------------------- neurons/miner.py | 17 ++++------------- synth/utils/config.py | 8 -------- 3 files changed, 4 insertions(+), 54 deletions(-) diff --git a/docs/miner_reference.md b/docs/miner_reference.md index 23a891f7..e74a33a8 100644 --- a/docs/miner_reference.md +++ b/docs/miner_reference.md @@ -166,39 +166,6 @@ pm2 start miner.config.js -- --blacklist.validator_min_stake 1000 [Back to top ^][table-of-contents] -#### `--blacklist.validator_exceptions INTEGER INTEGER INTEGER ...` - -Optional list of validator exceptions (e.g., --blacklist.validator_exceptions 0 1 8 17 34 49 53 38). - -Default: `[]` - -Example: - -```js -// miner.config.js -module.exports = { - apps: [ - { - name: "miner", - interpreter: "python3", - script: "./neurons/miner.py", - args: "--blacklist.validator_exceptions 0 1 8 17 34 49 53 38", - env: { - PYTHONPATH: ".", - }, - }, - ], -}; -``` - -Alternatively, you can add the args directly to the command: - -```shell -pm2 start miner.config.js -- --blacklist.validator_exceptions 0 1 8 17 34 49 53 38 -``` - -[Back to top ^][table-of-contents] - #### `--logging.debug` Turn on bittensor debugging information. 
diff --git a/neurons/miner.py b/neurons/miner.py index 8cf858ee..ab85ece5 100644 --- a/neurons/miner.py +++ b/neurons/miner.py @@ -106,21 +106,12 @@ async def blacklist(self, synapse: Simulation) -> typing.Tuple[bool, str]: uid = self.metagraph.hotkeys.index(synapse.dendrite.hotkey) stake = self.metagraph.S[uid] bt.logging.info(f"Requesting UID: {uid} | Stake at UID: {stake}") - bt.logging.debug( - f"Whitelisted validators: {self.config.blacklist.validator_exceptions}" - ) - - if uid in self.config.blacklist.validator_exceptions: + if stake <= self.config.blacklist.validator_min_stake: + # Ignore requests if the stake is below minimum bt.logging.info( - f"Requesting UID: {uid} whitelisted as a validator" + f"Hotkey: {synapse.dendrite.hotkey}: stake below minimum threshold of {self.config.blacklist.validator_min_stake}" ) - else: - if stake <= self.config.blacklist.validator_min_stake: - # Ignore requests if the stake is below minimum - bt.logging.info( - f"Hotkey: {synapse.dendrite.hotkey}: stake below minimum threshold of {self.config.blacklist.validator_min_stake}" - ) - return True, "Stake below minimum threshold" + return True, "Stake below minimum threshold" if self.config.blacklist.force_validator_permit: # If the config is set to force validator permit, then we should only allow requests from validators. 
diff --git a/synth/utils/config.py b/synth/utils/config.py index 7c3681e3..dc67a063 100644 --- a/synth/utils/config.py +++ b/synth/utils/config.py @@ -141,14 +141,6 @@ def add_miner_args(_, parser): help="Minimum validator stake to accept forward requests from as a miner", ) - parser.add_argument( - "--blacklist.validator_exceptions", - type=int, - nargs="+", - default=[], - help="List of validator exceptions (e.g., --blacklist.validator_exceptions 1 3 10)", - ) - parser.add_argument( "--wandb.enabled", type=bool, From b7096554b9d4bcdd15deeeabe4cb00aa35595358 Mon Sep 17 00:00:00 2001 From: Thykof Date: Thu, 13 Nov 2025 17:56:08 +0100 Subject: [PATCH 02/24] remove mypy from precommit --- .pre-commit-config.yaml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4fd5cfe8..867311cc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,8 +8,3 @@ repos: rev: 7.1.2 hooks: - id: flake8 -- repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.15.0 - hooks: - - id: mypy - exclude: ^(docs/|example-plugin/) From b2fc40a765ed86847ff62de9541451bd138fd4ab Mon Sep 17 00:00:00 2001 From: Thykof Date: Thu, 13 Nov 2025 17:56:27 +0100 Subject: [PATCH 03/24] update validator guide: nprocs:2 --- docs/validator_guide.md | 48 +++++------------------------ synth/base/dendrite_multiprocess.py | 2 +- synth/utils/config.py | 9 +----- 3 files changed, 9 insertions(+), 50 deletions(-) diff --git a/docs/validator_guide.md b/docs/validator_guide.md index 2a4c8926..593c82fe 100644 --- a/docs/validator_guide.md +++ b/docs/validator_guide.md @@ -25,7 +25,6 @@ - [`--neuron.sample_size INTEGER`](#--neuronsample_size-integer) - [`--neuron.timeout INTEGER`](#--neurontimeout-integer) - [`--neuron.nprocs INTEGER`](#--neuronnprocs-integer) - - [`--neuron.use_multiprocess INTEGER`](#--neuronuse_multiprocess-integer) - [`--neuron.vpermit_tao_limit INTEGER`](#--neuronvpermit_tao_limit-integer) - [`--wallet.hotkey 
TEXT`](#--wallethotkey-text) - [`--wallet.name TEXT`](#--walletname-text) @@ -808,9 +807,9 @@ pm2 start validator.config.js -- --neuron.timeout 120 #### `--neuron.nprocs INTEGER` -The number of processes to run for the validator dendrite, (e.g. 8). +The number of processes to run for the validator dendrite, (e.g. 2). -Default: `8` +Default: `2` Example: @@ -819,10 +818,10 @@ Example: module.exports = { apps: [ { - name: "validator", - interpreter: "python3", - script: "./neurons/validator.py", - args: "--neuron.nprocs 8", + name: 'validator', + interpreter: 'python3', + script: './neurons/validator.py', + args: '--neuron.nprocs 2', env: { PYTHONPATH: ".", }, @@ -834,7 +833,7 @@ module.exports = { Alternatively, you can add the args directly to the command: ```shell -pm2 start validator.config.js -- --neuron.nprocs 8 +pm2 start validator.config.js -- --neuron.nprocs 2 ``` [Back to top ^][table-of-contents] @@ -872,39 +871,6 @@ pm2 start validator.config.js -- --neuron.vpermit_tao_limit 1000 [Back to top ^][table-of-contents] -#### `--neuron.use_multiprocess INTEGER` - -Wether to use multiple processes for the validator dendrite. - -Default: `1` - -Example to disable multiprocess: - -```js -// validator.config.js -module.exports = { - apps: [ - { - name: "validator", - interpreter: "python3", - script: "./neurons/validator.py", - args: "--neuron.use_multiprocess 0", - env: { - PYTHONPATH: ".", - }, - }, - ], -}; -``` - -Alternatively, you can add the args directly to the command: - -```shell -pm2 start validator.config.js -- --neuron.nprocs 8 -``` - -[Back to top ^][table-of-contents] - #### `--wallet.hotkey TEXT` The hotkey of the wallet. 
diff --git a/synth/base/dendrite_multiprocess.py b/synth/base/dendrite_multiprocess.py index d01eead0..f2943836 100644 --- a/synth/base/dendrite_multiprocess.py +++ b/synth/base/dendrite_multiprocess.py @@ -304,7 +304,7 @@ def sync_forward_multiprocess( axons: list[bt.AxonInfo], synapse: Simulation, timeout: float, - nprocs: int = 8, + nprocs: int = 2, ) -> list[Simulation]: bt.logging.debug( f"Starting multiprocess forward with {nprocs} processes.", "dendrite" diff --git a/synth/utils/config.py b/synth/utils/config.py index dc67a063..3a2b42f6 100644 --- a/synth/utils/config.py +++ b/synth/utils/config.py @@ -191,14 +191,7 @@ def add_validator_args(_, parser: argparse.ArgumentParser): "--neuron.nprocs", type=int, help="The number of processes to run for the validator dendrite.", - default=8, - ) - - parser.add_argument( - "--neuron.use_multiprocess", - type=int, - help="The number of processes to run for the validator dendrite.", - default=1, + default=2, ) parser.add_argument( From 6a5d740c8afc43d612592d64859b0e1e0e4143b2 Mon Sep 17 00:00:00 2001 From: Thykof Date: Thu, 13 Nov 2025 17:57:52 +0100 Subject: [PATCH 04/24] rework round_time_to_minutes --- synth/utils/helpers.py | 31 +++++--------------------- tests/test_helpers.py | 46 ++++++++++++++++----------------------- tests/test_simulations.py | 2 +- 3 files changed, 25 insertions(+), 54 deletions(-) diff --git a/synth/utils/helpers.py b/synth/utils/helpers.py index 19f99f26..6aa8f92e 100644 --- a/synth/utils/helpers.py +++ b/synth/utils/helpers.py @@ -134,40 +134,19 @@ def get_intersecting_arrays(array1, array2): return filtered_array1, filtered_array2 -def round_time_to_minutes( - dt: datetime, in_seconds: int, extra_seconds=0 -) -> datetime: - """round validation time to the closest minute and add extra minutes +def round_time_to_minutes(dt: datetime, extra_seconds=0) -> datetime: + """round validation time to the closest minute and add extra seconds Args: dt (datetime): request_time - in_seconds (int): 60 
extra_seconds (int, optional): self.timeout_extra_seconds: 120. Defaults to 0. Returns: datetime: rounded-up datetime """ - # Define the rounding interval - rounding_interval = timedelta(seconds=in_seconds) - - # Calculate the number of seconds since the start of the day - seconds = ( - dt - dt.replace(hour=0, minute=0, second=0, microsecond=0) - ).total_seconds() - - # Calculate the next multiple of time_increment in seconds - next_interval_seconds = ( - (seconds // rounding_interval.total_seconds()) + 1 - ) * rounding_interval.total_seconds() - - # Get the rounded-up datetime - rounded_time = ( - dt.replace(hour=0, minute=0, second=0, microsecond=0) - + timedelta(seconds=next_interval_seconds) - + timedelta(seconds=extra_seconds) - ) - - return rounded_time + return (dt + timedelta(minutes=1)).replace( + second=0, microsecond=0 + ) + timedelta(seconds=extra_seconds) def from_iso_to_unix_time(iso_time: str): diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 3b97b21b..6f2ccc39 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -1,6 +1,7 @@ import unittest from datetime import datetime, timedelta, timezone + from synth.utils.helpers import ( convert_prices_to_time_format, get_intersecting_arrays, @@ -75,7 +76,7 @@ def test_get_intersecting_arrays(self): ) def test_round_time_to_minutes(self): - time_increment = 60 + time_increment = 0 self.assertEqual( round_time_to_minutes( @@ -92,50 +93,41 @@ def test_round_time_to_minutes(self): "2024-11-25T19:04:00", ) - def test_round_time_to_five_minutes(self): - time_increment = 300 - + def test_round_time_add_extra(self): + # add three extra minutes dt_str_1 = "2024-11-25T19:01:59.940515" - dt_str_2 = "2024-11-25T19:03:59.940515" - result_1 = round_time_to_minutes( - datetime.fromisoformat(dt_str_1), time_increment - ) - result_2 = round_time_to_minutes( - datetime.fromisoformat(dt_str_2), time_increment + datetime.fromisoformat(dt_str_1), 60 * 3 ) - self.assertEqual(result_1.isoformat(), 
"2024-11-25T19:05:00") + + # add one extra minute + dt_str_2 = "2024-11-25T19:03:59.940515" + result_2 = round_time_to_minutes(datetime.fromisoformat(dt_str_2), 60) self.assertEqual(result_2.isoformat(), "2024-11-25T19:05:00") - def test_round_time_to_minutes_plus_two_extra(self): - dt_str_1 = "2024-11-25T19:01:59.940515" - result_1 = round_time_to_minutes( - datetime.fromisoformat(dt_str_1), 60, 120 - ) - self.assertEqual(result_1.isoformat(), "2024-11-25T19:04:00") + def test_round_time_add_extra_seconds(self): + dt_str_1 = "2024-11-25T19:11:46.940515" + result_1 = round_time_to_minutes(datetime.fromisoformat(dt_str_1), 10) + self.assertEqual(result_1.isoformat(), "2024-11-25T19:12:10") dt_str_2 = "2024-11-25T19:03:09.659353" - result_2 = round_time_to_minutes( - datetime.fromisoformat(dt_str_2), 60, 120 - ) + result_2 = round_time_to_minutes(datetime.fromisoformat(dt_str_2), 120) self.assertEqual(result_2.isoformat(), "2024-11-25T19:06:00") def test_round_time_to_two_minutes(self): - time_increment = 120 extra_seconds = 60 dt_str_1 = "2024-11-25T19:01:59.940515" - dt_str_2 = "2024-11-25T19:03:59.940515" - result_1 = round_time_to_minutes( - datetime.fromisoformat(dt_str_1), time_increment, extra_seconds + datetime.fromisoformat(dt_str_1), extra_seconds ) + self.assertEqual(result_1.isoformat(), "2024-11-25T19:03:00") + + dt_str_2 = "2024-11-25T19:03:59.940515" result_2 = round_time_to_minutes( - datetime.fromisoformat(dt_str_2), time_increment, extra_seconds + datetime.fromisoformat(dt_str_2), extra_seconds ) - - self.assertEqual(result_1.isoformat(), "2024-11-25T19:03:00") self.assertEqual(result_2.isoformat(), "2024-11-25T19:05:00") def test_from_iso_to_unix_time(self): diff --git a/tests/test_simulations.py b/tests/test_simulations.py index b1062130..707f8304 100644 --- a/tests/test_simulations.py +++ b/tests/test_simulations.py @@ -33,7 +33,7 @@ def test_run(): ) current_time = get_current_time() - start_time = round_time_to_minutes(current_time, 60, 120) + 
start_time = round_time_to_minutes(current_time, 120) simulation_input.start_time = start_time.isoformat() print("start_time", simulation_input.start_time) From 02195d386d374a784ebedb4e1e92ceb4f7d25499 Mon Sep 17 00:00:00 2001 From: Thykof Date: Thu, 13 Nov 2025 17:58:06 +0100 Subject: [PATCH 05/24] change some logs to trace --- synth/base/dendrite.py | 2 +- synth/base/dendrite_multiprocess.py | 4 ++-- synth/utils/uids.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/synth/base/dendrite.py b/synth/base/dendrite.py index 654716c9..748c0c91 100644 --- a/synth/base/dendrite.py +++ b/synth/base/dendrite.py @@ -265,7 +265,7 @@ def log_exception(exception: Exception): ValidationError, ), ): - bt.logging.debug(f"{error_type}#{error_id}: {exception}") + bt.logging.trace(f"{error_type}#{error_id}: {exception}") else: bt.logging.error(f"{error_type}#{error_id}: {exception}") traceback.print_exc(file=sys.stderr) diff --git a/synth/base/dendrite_multiprocess.py b/synth/base/dendrite_multiprocess.py index f2943836..6cb75d15 100644 --- a/synth/base/dendrite_multiprocess.py +++ b/synth/base/dendrite_multiprocess.py @@ -169,7 +169,7 @@ async def call( synapse.dendrite.signature = signature try: - bt.logging.debug( + bt.logging.trace( f"dendrite | --> | {synapse.get_total_size()} B | {synapse.name} | {synapse.axon.hotkey} | {synapse.axon.ip}:{str(synapse.axon.port)} | 0 | Success" ) response = await client.post( @@ -189,7 +189,7 @@ async def call( synapse = process_error_message(synapse, REQUEST_NAME, e) finally: - bt.logging.debug( + bt.logging.trace( f"dendrite | <-- | {synapse.get_total_size()} B | {synapse.name} | {synapse.axon.hotkey} | {synapse.axon.ip}:{str(synapse.axon.port)} | {synapse.dendrite.status_code} | {synapse.dendrite.status_message}" ) diff --git a/synth/utils/uids.py b/synth/utils/uids.py index 3de98135..4f1a7dd7 100644 --- a/synth/utils/uids.py +++ b/synth/utils/uids.py @@ -14,7 +14,7 @@ def check_uid_availability( """ # Filter non 
serving axons. if not metagraph.axons[uid].is_serving: - bt.logging.debug(f"uid {uid} is not serving") + bt.logging.trace(f"uid {uid} is not serving") return False # Filter validator permit > 1024 stake. if metagraph.validator_permit[uid]: From e21bd0010f471537ae2483d5b94ef1e879d6f811 Mon Sep 17 00:00:00 2001 From: Thykof Date: Thu, 13 Nov 2025 18:12:27 +0100 Subject: [PATCH 06/24] fixup use_multiprocess --- synth/validator/forward.py | 30 +++++++++++++----------------- 1 file changed, 13 insertions(+), 17 deletions(-) diff --git a/synth/validator/forward.py b/synth/validator/forward.py index 14270c1e..d76e7d29 100644 --- a/synth/validator/forward.py +++ b/synth/validator/forward.py @@ -226,25 +226,21 @@ async def query_available_miners_and_save_responses( start_time = time.time() - if base_neuron.config.neuron.use_multiprocess == 1: - synapses = sync_forward_multiprocess( - base_neuron.dendrite.keypair, - base_neuron.dendrite.uuid, - base_neuron.dendrite.external_ip, - axons, - synapse, - timeout, - base_neuron.config.neuron.nprocs, - ) - else: - synapses = await base_neuron.dendrite.forward( - axons=axons, - synapse=synapse, - timeout=timeout, - ) + synapses = sync_forward_multiprocess( + base_neuron.dendrite.keypair, + base_neuron.dendrite.uuid, + base_neuron.dendrite.external_ip, + axons, + synapse, + timeout, + base_neuron.config.neuron.nprocs, + ) total_process_time = str(time.time() - start_time) - bt.logging.debug(f"Forwarding took {total_process_time} seconds") + bt.logging.debug( + f"Forwarding took {total_process_time} seconds", + "sync_forward_multiprocess", + ) miner_predictions = {} for i, synapse in enumerate(synapses): From 887b3200ba7d9f55b1f514f653962ecd8f23c8b2 Mon Sep 17 00:00:00 2001 From: Thykof Date: Thu, 13 Nov 2025 18:12:37 +0100 Subject: [PATCH 07/24] wip HFT --- neurons/miner.py | 2 +- neurons/validator.py | 227 +++++++++++++++++-------------------- synth/base/dendrite.py | 97 +++++++--------- synth/base/miner.py | 139 
++++++++++++++++++++++- synth/base/neuron.py | 2 +- synth/base/validator.py | 79 +------------ synth/validator/forward.py | 70 +++++++++++- 7 files changed, 360 insertions(+), 256 deletions(-) diff --git a/neurons/miner.py b/neurons/miner.py index ab85ece5..616a71df 100644 --- a/neurons/miner.py +++ b/neurons/miner.py @@ -173,7 +173,7 @@ def load_state(self): def set_weights(self): pass - async def forward_validator(self): + def forward_validator(self): pass def print_info(self): diff --git a/neurons/validator.py b/neurons/validator.py index 8eee8806..e0a4fee2 100644 --- a/neurons/validator.py +++ b/neurons/validator.py @@ -1,9 +1,10 @@ # The MIT License (MIT) # Copyright © 2023 Yuma Rao # Copyright © 2023 Mode Labs -import asyncio -from datetime import datetime, timedelta +from datetime import datetime import multiprocessing as mp +import sched +import time # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the “Software”), to deal in the Software without restriction, including without limitation @@ -29,15 +30,12 @@ from synth.utils.helpers import ( get_current_time, round_time_to_minutes, - timeout_until, - more_paths_launch_time, ) from synth.utils.logging import setup_gcp_logging from synth.utils.opening_hours import should_skip_xau from synth.validator.forward import ( calculate_moving_average_and_update_rewards, calculate_rewards_and_update_scores, - get_available_miners_and_update_metagraph_history, query_available_miners_and_save_responses, send_weights_to_bittensor_and_update_weights_history, ) @@ -48,10 +46,14 @@ load_dotenv() +# Constants +IN_1_HOUR = 60 * 60 +IN_15_MINUTES = 60 * 15 +IN_2_MINUTES = 60 * 2 + + class Validator(BaseValidatorNeuron): """ - Your validator neuron class. You should use this class to define your validator's behavior. In particular, you should replace the forward function with your own logic. 
- This class inherits from the BaseValidatorNeuron class, which in turn inherits from BaseNeuron. The BaseNeuron class takes care of routine tasks such as setting up wallet, subtensor, metagraph, logging directory, parsing config, etc. You can override any of the methods in BaseNeuron if you need to customize the behavior. This class provides reasonable default behavior for a validator such as keeping a moving average of the scores of the miners and using them to set weights at the end of each epoch. Additionally, the scores are reset for new hotkeys at the end of each epoch. @@ -64,38 +66,41 @@ def __init__(self, config=None): bt.logging.info("load_state()") self.load_state() + self.miner_uids: list[int] = [] self.miner_data_handler = MinerDataHandler() self.price_data_provider = PriceDataProvider() + self.scheduler = sched.scheduler(time.time, time.sleep) + self.simulation_input_list = [ # input data: give me prediction of BTC price for the next 1 day for every 5 min of time SimulationInput( asset="BTC", time_increment=300, - time_length=86400, - num_simulations=100, + time_length=900, + num_simulations=1000, ), SimulationInput( asset="ETH", time_increment=300, time_length=86400, - num_simulations=100, + num_simulations=1000, ), SimulationInput( asset="XAU", time_increment=300, time_length=86400, - num_simulations=100, + num_simulations=1000, ), SimulationInput( asset="SOL", time_increment=300, time_length=86400, - num_simulations=100, + num_simulations=1000, ), ] - self.timeout_extra_seconds = 60 + self.timeout_extra_seconds = 120 self.assert_assets_supported() @@ -104,72 +109,93 @@ def assert_assets_supported(self): for simulation in self.simulation_input_list: assert simulation.asset in PriceDataProvider.TOKEN_MAP - async def forward_validator(self): + def forward_validator(self): """ Validator forward pass. 
Consists of: - Generating the query - Querying the miners - Getting the responses - - Rewarding the miners + - Calculating scores - Updating the scores + - Rewarding the miners """ bt.logging.info("calling forward_validator()") - return [ - asyncio.create_task(self.forward_prompt()), - asyncio.create_task(self.forward_score()), - ] - async def wait_till_next_simulation( - self, request_time: datetime, simulation_input_list: list - ): - # wait until the next simulation - next_iteration = request_time + timedelta( - minutes=60 / len(simulation_input_list) + current_time = get_current_time() + + # for idx, simulation_input in enumerate(self.simulation_input_list): + # # shift by 30 seconds to not overlap with HFT prompts + # next_prompt_time = round_time_to_minutes( + # current_time, (IN_15_MINUTES * idx) + 30 + # ) + # bt.logging.info( + # f"scheduling regular prompt at {next_prompt_time.isoformat()}" + # ) + # self.scheduler.enter( + # (next_prompt_time - current_time).total_seconds(), + # 1, + # self.forward_prompt, + # (simulation_input,), + # ) + + # initialize miners data + self.update_miners() + + # send simultaneously all assets for HFT + self.schedule_prompt_hft(current_time) + + # bt.logging.info( + # f"scheduling scoring at {(current_time + timedelta(seconds=IN_15_MINUTES)).isoformat()}" + # ) + # self.scheduler.enter( + # IN_15_MINUTES, + # 1, + # self.forward_score, + # ) + self.scheduler.run() + + def schedule_prompt_hft(self, current_time: datetime): + next_prompt_time = round_time_to_minutes(current_time) + bt.logging.info( + f"scheduling HFT prompt at {next_prompt_time.isoformat()}" ) - wait_time = timeout_until(next_iteration) + self.scheduler.enter( + (next_prompt_time - current_time).total_seconds(), + 1, + self.forward_prompt_hft, + ) + + def forward_prompt(self, simulation_input: SimulationInput): + request_time = get_current_time() + next_prompt_time = round_time_to_minutes(request_time, IN_1_HOUR - 60) bt.logging.info( - f"Waiting for 
{wait_time/60} minutes until the next simulation", - "forward_prompt", + f"scheduling regular prompt at {next_prompt_time.isoformat()}" ) - await asyncio.sleep(wait_time) - - async def forward_prompt(self): - for simulation_input in self.simulation_input_list: - # ================= Step 1 ================= # - # Getting available miners from metagraph and saving information about them - # and their properties (rank, incentives, emission) at the current moment in the database - # in the metagraph_history table and in the miners table - # ========================================== # - miner_uids = get_available_miners_and_update_metagraph_history( - base_neuron=self, - miner_data_handler=self.miner_data_handler, - ) + self.scheduler.enter( + # round to the next minute and add 2 of 15 minutes and subtract 60 seconds because round_time_to_minutes rounds to the next minute + (next_prompt_time - request_time).total_seconds(), + 1, + self.forward_prompt, + (simulation_input,), + ) - if len(miner_uids) == 0: - bt.logging.error( - "No miners available", - "forward_prompt", - ) - await self.wait_till_next_simulation( - get_current_time(), self.simulation_input_list - ) - continue - - request_time = get_current_time() - start_time = round_time_to_minutes( - request_time, 60, self.timeout_extra_seconds - ) + # ================= Step 1 ================= # + # Getting available miners from metagraph and saving information about them + # and their properties (rank, incentives, emission) at the current moment in the database + # in the metagraph_history table and in the miners table + # ========================================== # + + self.update_miners() - if should_skip_xau(start_time) and simulation_input.asset == "XAU": - bt.logging.info( - "Skipping XAU simulation as market is closed", - "forward_prompt", - ) - await self.wait_till_next_simulation( - request_time, self.simulation_input_list - ) - continue + start_time = round_time_to_minutes(request_time, 60) + + if 
should_skip_xau(start_time) and simulation_input.asset == "XAU": + bt.logging.info( + "Skipping XAU simulation as market is closed", + "forward_prompt", + ) + return # ================= Step 2 ================= # # Query all the available miners and save all their responses @@ -179,50 +205,21 @@ async def forward_prompt(self): # add the start time to the simulation input simulation_input.start_time = start_time.isoformat() - # TEMP - if request_time >= more_paths_launch_time: - simulation_input.num_simulations = 1000 - # END TEMP - - await query_available_miners_and_save_responses( - base_neuron=self, - miner_data_handler=self.miner_data_handler, - miner_uids=miner_uids, - simulation_input=simulation_input, - request_time=request_time, - ) - - await self.wait_till_next_simulation( - request_time, self.simulation_input_list - ) + query_available_miners_and_save_responses( + base_neuron=self, + miner_data_handler=self.miner_data_handler, + miner_uids=self.miner_uids, + simulation_input=simulation_input, + request_time=request_time, + ) async def forward_score(self): + # getting current time current_time = get_current_time() - next_iteration = current_time + timedelta(minutes=15) - - async def wait_till_next_iteration(): - # wait until the next iteration - wait_time = timeout_until(next_iteration) - bt.logging.info( - f"Waiting for {wait_time/60} minutes until the next iteration", - "forward_score", - ) - await asyncio.sleep(wait_time) - # round current time to the closest minute and add extra minutes # to be sure we are after the start time of the prompt - scored_time = round_time_to_minutes( - current_time, 60, self.timeout_extra_seconds * 2 - ) - - # wait until the score_time - wait_time = timeout_until(scored_time) - bt.logging.info( - f"Waiting for {wait_time/60} minutes to start validating", - "forward_score", - ) - await asyncio.sleep(wait_time) + scored_time = round_time_to_minutes(current_time) # ================= Step 3 ================= # # Calculate 
rewards based on historical predictions data @@ -241,7 +238,6 @@ async def wait_till_next_iteration(): ) if not success: - await wait_till_next_iteration() return # ================= Step 4 ================= # @@ -259,7 +255,6 @@ async def wait_till_next_iteration(): ) if len(moving_averages_data) == 0: - await wait_till_next_iteration() return # ================= Step 5 ================= # @@ -267,24 +262,6 @@ async def wait_till_next_iteration(): # into bittensor consensus calculation # ========================================== # - moving_averages_data.append( - { - "miner_id": 0, - "miner_uid": ( - 23 if self.config.subtensor.network == "test" else 248 - ), - "smoothed_score": 0, - "reward_weight": sum( - [r["reward_weight"] for r in moving_averages_data] - ), - "updated_at": scored_time.isoformat(), - } - ) - - bt.logging.info( - f"Moving averages data for owner: {moving_averages_data[-1]}" - ) - send_weights_to_bittensor_and_update_weights_history( base_neuron=self, moving_averages_data=moving_averages_data, @@ -292,7 +269,11 @@ async def wait_till_next_iteration(): scored_time=scored_time, ) - await wait_till_next_iteration() + self.scheduler.enter( + IN_15_MINUTES, + 1, + self.forward_score, + ) async def forward_miner(self, _: bt.Synapse) -> bt.Synapse: pass diff --git a/synth/base/dendrite.py b/synth/base/dendrite.py index 748c0c91..f649dded 100644 --- a/synth/base/dendrite.py +++ b/synth/base/dendrite.py @@ -1,7 +1,7 @@ import sys import traceback -from typing import List, Optional, Tuple, Type, Union import time +from typing import List, Optional, Tuple, Type, Union import asyncio import uuid import aiohttp @@ -10,11 +10,12 @@ import bittensor as bt from bittensor_wallet import Keypair, Wallet import httpx -from pydantic import ValidationError import uvloop from synth.protocol import Simulation +from synth.simulation_input import SimulationInput +from synth.utils.helpers import timeout_from_start_time 
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) @@ -97,65 +98,51 @@ class SynthDendrite(bt.Dendrite): def __init__(self, wallet: Optional[Union[Wallet, Keypair]] = None): super().__init__(wallet=wallet) - async def forward( + async def forward_hft( self, axons: Union[ list[Union[bt.AxonInfo, bt.Axon]], Union[bt.AxonInfo, bt.Axon] ], - synapse: Simulation, - timeout: float = 12, - run_async: bool = True, - ) -> list[Simulation]: - is_list = True - # If a single axon is provided, wrap it in a list for uniform processing - if not isinstance(axons, list): - is_list = False - axons = [axons] - - async def query_all_axons(): - async def single_axon_response( - target_axon: Union[bt.AxonInfo, bt.Axon], - ) -> Simulation: - async with httpx.AsyncClient( - http2=True, - limits=httpx.Limits( - max_connections=None, max_keepalive_connections=25 - ), - timeout=timeout, - ) as client: - return await self.call_http2( - client=client, - target_axon=target_axon, - synapse=synapse.model_copy(), # type: ignore - timeout=timeout, + simulation_input_list: list[SimulationInput], + ) -> list[tuple[Simulation, int]]: + # Get responses for all axons. + async with httpx.AsyncClient( + http2=True, + limits=httpx.Limits( + max_connections=None, max_keepalive_connections=1000 + ), + ) as client: + tasks = [] + for simulation_idx, simulation_input in enumerate( + simulation_input_list + ): + synapse = Simulation(simulation_input=simulation_input) + for axon_idx, target_axon in enumerate(axons): + # forward the index along with the synapse to identify miner later + tasks.append( + self.call_http2( + client=client, + axon_idx=axon_idx, + simulation_idx=simulation_idx, + target_axon=target_axon, + synapse=synapse.model_copy(), + start_time=simulation_input.start_time, + ) ) - # If run_async flag is False, get responses one by one. - # If run_async flag is True, get responses concurrently using asyncio.gather(). 
- if not run_async: - return [ - await single_axon_response(target_axon) - for target_axon in axons - ] # type: ignore - - return await asyncio.gather( - *(single_axon_response(target_axon) for target_axon in axons) - ) # type: ignore - - # Get responses for all axons. - responses = await query_all_axons() - # Return the single response if only one axon was targeted, else return all responses - return responses[0] if len(responses) == 1 and not is_list else responses # type: ignore + return await asyncio.gather(*tasks) async def call_http2( self, client: httpx.AsyncClient, + axon_idx: int, target_axon: Union[bt.AxonInfo, bt.Axon], synapse: Simulation, - timeout: float = 12.0, - ) -> Simulation: + start_time: str, + ) -> tuple[Simulation, int]: + timeout = timeout_from_start_time(None, start_time) + # Record start time - start_time = time.time() target_axon = ( target_axon.info() if isinstance(target_axon, bt.Axon) @@ -176,6 +163,7 @@ async def call_http2( self._log_outgoing_request(synapse) # Make the HTTP POST request + start_time = time.time() response = await client.post( url=url, headers=synapse.to_headers(), @@ -191,9 +179,7 @@ async def call_http2( self.process_server_response( status, headers, json_response, synapse ) - - # Set process time and log the response - synapse.dendrite.process_time = str(time.time() - start_time) # type: ignore + synapse.dendrite.process_time = str(time.time() - start_time) except Exception as e: synapse = self.process_error_message(synapse, request_name, e) @@ -202,7 +188,7 @@ async def call_http2( self._log_incoming_response(synapse) # Return the updated synapse object after deserializing if requested - return synapse + return synapse, axon_idx def process_server_response( self, status, _, json_response: dict, local_synapse: Simulation @@ -229,8 +215,10 @@ def process_server_response( local_synapse.axon.status_message = json_response.get("message") # Update the status code and status message of the dendrite to match the axon - 
local_synapse.dendrite.status_code = local_synapse.axon.status_code # type: ignore - local_synapse.dendrite.status_message = local_synapse.axon.status_message # type: ignore + local_synapse.dendrite.status_code = local_synapse.axon.status_code + local_synapse.dendrite.status_message = ( + local_synapse.axon.status_message + ) def log_exception(self, exception: Exception): log_exception(exception) @@ -262,7 +250,6 @@ def log_exception(exception: Exception): httpx.ReadTimeout, httpx.ConnectTimeout, httpx.RemoteProtocolError, - ValidationError, ), ): bt.logging.trace(f"{error_type}#{error_id}: {exception}") diff --git a/synth/base/miner.py b/synth/base/miner.py index 257c3900..57ddbcf0 100644 --- a/synth/base/miner.py +++ b/synth/base/miner.py @@ -20,14 +20,22 @@ import threading import argparse import traceback +from typing import Union + import bittensor as bt +from bittensor.core.axon import V_7_2_0 +from bittensor.core.errors import SynapseDendriteNoneException +from bittensor_wallet import Keypair +from bittensor.utils.axon_utils import ( + allowed_nonce_window_ns, + calculate_diff_seconds, +) + from synth.base.neuron import BaseNeuron from synth.utils.config import add_miner_args -from typing import Union - class BaseMinerNeuron(BaseNeuron): """ @@ -65,6 +73,7 @@ def __init__(self, config=None): forward_fn=self.forward_miner, blacklist_fn=self.blacklist, priority_fn=self.priority, + verify_fn=self.verify, ) bt.logging.info(f"Axon created: {self.axon}") @@ -74,6 +83,132 @@ def __init__(self, config=None): self.thread: Union[threading.Thread, None] = None self.lock = asyncio.Lock() + async def verify(self, synapse: bt.Synapse): + """ + This method is used to verify the authenticity of a received message using a digital signature. + + It ensures that the message was not tampered with and was sent by the expected sender. + + The :func:`default_verify` method in the Bittensor framework is a critical security function within the + Axon server. 
It is designed to authenticate incoming messages by verifying their digital + signatures. This verification ensures the integrity of the message and confirms that it was + indeed sent by the claimed sender. The method plays a pivotal role in maintaining the trustworthiness + and reliability of the communication within the Bittensor network. + + Key Features + Security Assurance + The default_verify method is crucial for ensuring the security of the Bittensor network. By verifying + digital signatures, it guards against unauthorized access and data manipulation. + + Preventing Replay Attacks + The method checks for increasing nonce values, which is a vital + step in preventing replay attacks. A replay attack involves an adversary reusing or + delaying the transmission of a valid data transmission to deceive the receiver. + The first time a nonce is seen, it is checked for freshness by ensuring it is + within an acceptable delta time range. + + Authenticity and Integrity Checks + By verifying that the message's digital signature matches + its content, the method ensures the message's authenticity (it comes from the claimed + sender) and integrity (it hasn't been altered during transmission). + + Trust in Communication + This method fosters trust in the network communication. Neurons + (nodes in the Bittensor network) can confidently interact, knowing that the messages they + receive are genuine and have not been tampered with. + + Cryptographic Techniques + The method's reliance on asymmetric encryption techniques is a + cornerstone of modern cryptographic security, ensuring that only entities with the correct + cryptographic keys can participate in secure communication. + + Args: + synapse(bittensor.core.synapse.Synapse): bittensor request synapse. + + Raises: + Exception: If the ``receiver_hotkey`` doesn't match with ``self.receiver_hotkey``. + Exception: If the nonce is not larger than the previous nonce for the same endpoint key. 
+ Exception: If the signature verification fails. + + After successful verification, the nonce for the given endpoint key is updated. + + Note: + The verification process assumes the use of an asymmetric encryption algorithm, + where the sender signs the message with their private key and the receiver verifies the + signature using the sender's public key. + """ + # Build the keypair from the dendrite_hotkey + if synapse.dendrite is not None: + keypair = Keypair(ss58_address=synapse.dendrite.hotkey) + + # Build the signature messages. + message = f"{synapse.dendrite.nonce}.{synapse.dendrite.hotkey}.{self.wallet.hotkey.ss58_address}.{synapse.dendrite.uuid}.{synapse.computed_body_hash}" + + # Build the unique endpoint key. + endpoint_key = f"{synapse.dendrite.hotkey}:{synapse.dendrite.uuid}" + + # Requests must have nonces to be safe from replays + if synapse.dendrite.nonce is None: + raise Exception("Missing Nonce") + + # Newer nonce structure post v7.2 + if ( + synapse.dendrite.version is not None + and synapse.dendrite.version >= V_7_2_0 + ): + # If we don't have a nonce stored, ensure that the nonce falls within + # a reasonable delta. 
+ current_time_ns = time.time_ns() + allowed_window_ns = allowed_nonce_window_ns( + current_time_ns, synapse.timeout + ) + + if ( + self.nonces.get(endpoint_key) is None + and synapse.dendrite.nonce <= allowed_window_ns + ): + diff_seconds, allowed_delta_seconds = ( + calculate_diff_seconds( + current_time_ns, + synapse.timeout, + synapse.dendrite.nonce, + ) + ) + raise Exception( + f"Nonce is too old: acceptable delta is {allowed_delta_seconds:.2f} seconds but request was {diff_seconds:.2f} seconds old" + ) + + # If a nonce is stored, ensure the new nonce + # is greater or equal to than the previous nonce + if ( + self.nonces.get(endpoint_key) is not None + and synapse.dendrite.nonce < self.nonces[endpoint_key] + ): + raise Exception( + "Nonce is too old, a newer one was last processed" + ) + # Older nonce structure pre v7.2 + else: + if ( + self.nonces.get(endpoint_key) is not None + and synapse.dendrite.nonce < self.nonces[endpoint_key] + ): + raise Exception( + "Nonce is too old, a newer one was last processed" + ) + + if synapse.dendrite.signature and not keypair.verify( + message, synapse.dendrite.signature + ): + raise Exception( + f"Signature mismatch with {message} and {synapse.dendrite.signature}" + ) + + # Success + self.nonces[endpoint_key] = synapse.dendrite.nonce # type: ignore + else: + raise SynapseDendriteNoneException(synapse=synapse) + def run(self): """ Initiates and manages the main loop for the miner on the Bittensor network. The main loop handles graceful shutdown on keyboard interrupts and logs unforeseen errors. diff --git a/synth/base/neuron.py b/synth/base/neuron.py index 8364fc52..57e2c234 100644 --- a/synth/base/neuron.py +++ b/synth/base/neuron.py @@ -102,7 +102,7 @@ def __init__(self, config=None): async def forward_miner(self, synapse: bt.Synapse) -> bt.Synapse: ... @abstractmethod - async def forward_validator(self): ... + def forward_validator(self): ... @abstractmethod def resync_metagraph(self): ... 
diff --git a/synth/base/validator.py b/synth/base/validator.py index 5c6cd09e..c719f5c1 100644 --- a/synth/base/validator.py +++ b/synth/base/validator.py @@ -21,13 +21,13 @@ import sys import copy import numpy as np -import asyncio import argparse -import threading +from typing import List +import traceback + + import bittensor as bt -from typing import List, Union -import traceback from synth.base.dendrite import SynthDendrite from synth.base.neuron import BaseNeuron @@ -73,14 +73,6 @@ def __init__(self, config=None): else: bt.logging.warning("axon off, not serving ip to chain.") - # Create asyncio event loop to manage async tasks. - self.loop = asyncio.get_event_loop() - - # Instantiate runners - self.should_exit = False - self.is_running = False - self.thread: Union[threading.Thread, None] = None - def serve_axon(self): """Serve axon to enable external connections.""" @@ -104,10 +96,6 @@ def serve_axon(self): f"Failed to create Axon initialize with exception: {e}" ) - async def concurrent_forward(self): - coroutines = await self.forward_validator() - await asyncio.gather(*coroutines) - def run(self): """ Initiates and manages the main loop for the miner on the Bittensor network. The main loop handles graceful shutdown on keyboard interrupts and logs unforeseen errors. @@ -131,19 +119,8 @@ def run(self): # This loop maintains the validator's operations until intentionally stopped. try: - while True: - # Run multiple forwards concurrently. - self.loop.run_until_complete(self.concurrent_forward()) - - # Check if we should exit. - if self.should_exit: - break - - # Sync metagraph and potentially set weights. - self.sync() - - self.step += 1 - + # Run forwards. + self.forward_validator() # If someone intentionally stops the validator, it'll safely terminate operations. 
except KeyboardInterrupt: if not self.config.neuron.axon_off: @@ -156,50 +133,6 @@ def run(self): bt.logging.error(f"Error during validation: {str(err)}") traceback.print_exc(file=sys.stderr) - def run_in_background_thread(self): - """ - Starts the validator's operations in a background thread upon entering the context. - This method facilitates the use of the validator in a 'with' statement. - """ - if not self.is_running: - bt.logging.debug("Starting validator in background thread.") - self.should_exit = False - self.thread = threading.Thread(target=self.run, daemon=True) - self.thread.start() - self.is_running = True - bt.logging.debug("Started") - - def stop_run_thread(self): - """ - Stops the validator's operations that are running in the background thread. - """ - if self.is_running: - bt.logging.debug("Stopping validator in background thread.") - self.should_exit = True - if self.thread is not None: - self.thread.join(5) - self.is_running = False - bt.logging.debug("Stopped") - - def __enter__(self): - self.run_in_background_thread() - return self - - def __exit__(self, exc_type, exc_value, traceback): - """ - Stops the validator's background operations upon exiting the context. - This method facilitates the use of the validator in a 'with' statement. - - Args: - exc_type: The type of the exception that caused the context to be exited. - None if the context was exited without an exception. - exc_value: The instance of the exception that caused the context to be exited. - None if the context was exited without an exception. - traceback: A traceback object encoding the stack trace. - None if the context was exited without an exception. - """ - self.stop_run_thread() - def set_weights(self): """ Sets the validator weights to the metagraph hotkeys based on the scores it has received from the miners. The weights determine the trust and incentive level the validator assigns to miner nodes on the network. 
diff --git a/synth/validator/forward.py b/synth/validator/forward.py index d76e7d29..8513706e 100644 --- a/synth/validator/forward.py +++ b/synth/validator/forward.py @@ -22,6 +22,8 @@ import typing import sys import traceback +import asyncio + import bittensor as bt import numpy as np @@ -198,13 +200,20 @@ def calculate_rewards_and_update_scores( return fail_count != len(validator_requests) -async def query_available_miners_and_save_responses( +def query_available_miners_and_save_responses( base_neuron: BaseValidatorNeuron, miner_data_handler: MinerDataHandler, miner_uids: list, simulation_input: SimulationInput, request_time: datetime, ): + if len(miner_uids) == 0: + bt.logging.error( + "No miners available", + "query_available_miners_and_save_responses", + ) + return + timeout = timeout_from_start_time( base_neuron.config.neuron.timeout, simulation_input.start_time ) @@ -268,6 +277,65 @@ async def query_available_miners_and_save_responses( bt.logging.info("skip saving because no prediction") +def query_available_miners_and_save_responses_hft( + base_neuron: BaseValidatorNeuron, + miner_data_handler: MinerDataHandler, + miner_uids: list, + simulation_input_list: list[SimulationInput], + request_time: datetime, +): + if len(miner_uids) == 0: + bt.logging.error( + "No miners available", + "query_available_miners_and_save_responses_hft", + ) + return + + axons = [base_neuron.metagraph.axons[uid] for uid in miner_uids] + + start_time = time.time() + + synapses = asyncio.run( + base_neuron.dendrite.forward_hft( + axons=axons, + simulation_input_list=simulation_input_list, + ) + ) + + total_process_time = str(time.time() - start_time) + bt.logging.debug( + f"Forwarding took {total_process_time} seconds", "forward_hft" + ) + + miner_predictions = {} + for simulation_input in simulation_input_list: + miner_predictions[simulation_input.asset] = {} + + for synapse, idx in synapses: + response = synapse.deserialize() + process_time = synapse.dendrite.process_time + 
format_validation = validate_responses( + response, simulation_input_list[0], request_time, process_time + ) + # use index of synapse to get miner id + miner_id = miner_uids[idx] + miner_predictions[synapse.simulation_input.asset][miner_id] = ( + response, + format_validation, + process_time, + ) + + for simulation_input in simulation_input_list: + if len(miner_predictions[simulation_input.asset]) > 0: + miner_data_handler.save_responses( + miner_predictions[simulation_input.asset], + simulation_input, + request_time, + ) + else: + bt.logging.info("skip saving because no prediction") + + def get_available_miners_and_update_metagraph_history( base_neuron: BaseValidatorNeuron, miner_data_handler: MinerDataHandler, From 0cb733457d7b95cfa5942b22add95f370b05e12d Mon Sep 17 00:00:00 2001 From: Thykof Date: Thu, 13 Nov 2025 18:12:45 +0100 Subject: [PATCH 08/24] Revert "wip HFT" This reverts commit 887b3200ba7d9f55b1f514f653962ecd8f23c8b2. --- neurons/miner.py | 2 +- neurons/validator.py | 227 ++++++++++++++++++++----------------- synth/base/dendrite.py | 97 +++++++++------- synth/base/miner.py | 139 +---------------------- synth/base/neuron.py | 2 +- synth/base/validator.py | 79 ++++++++++++- synth/validator/forward.py | 70 +----------- 7 files changed, 256 insertions(+), 360 deletions(-) diff --git a/neurons/miner.py b/neurons/miner.py index 616a71df..ab85ece5 100644 --- a/neurons/miner.py +++ b/neurons/miner.py @@ -173,7 +173,7 @@ def load_state(self): def set_weights(self): pass - def forward_validator(self): + async def forward_validator(self): pass def print_info(self): diff --git a/neurons/validator.py b/neurons/validator.py index e0a4fee2..8eee8806 100644 --- a/neurons/validator.py +++ b/neurons/validator.py @@ -1,10 +1,9 @@ # The MIT License (MIT) # Copyright © 2023 Yuma Rao # Copyright © 2023 Mode Labs -from datetime import datetime +import asyncio +from datetime import datetime, timedelta import multiprocessing as mp -import sched -import time # Permission is 
hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the “Software”), to deal in the Software without restriction, including without limitation @@ -30,12 +29,15 @@ from synth.utils.helpers import ( get_current_time, round_time_to_minutes, + timeout_until, + more_paths_launch_time, ) from synth.utils.logging import setup_gcp_logging from synth.utils.opening_hours import should_skip_xau from synth.validator.forward import ( calculate_moving_average_and_update_rewards, calculate_rewards_and_update_scores, + get_available_miners_and_update_metagraph_history, query_available_miners_and_save_responses, send_weights_to_bittensor_and_update_weights_history, ) @@ -46,14 +48,10 @@ load_dotenv() -# Constants -IN_1_HOUR = 60 * 60 -IN_15_MINUTES = 60 * 15 -IN_2_MINUTES = 60 * 2 - - class Validator(BaseValidatorNeuron): """ + Your validator neuron class. You should use this class to define your validator's behavior. In particular, you should replace the forward function with your own logic. + This class inherits from the BaseValidatorNeuron class, which in turn inherits from BaseNeuron. The BaseNeuron class takes care of routine tasks such as setting up wallet, subtensor, metagraph, logging directory, parsing config, etc. You can override any of the methods in BaseNeuron if you need to customize the behavior. This class provides reasonable default behavior for a validator such as keeping a moving average of the scores of the miners and using them to set weights at the end of each epoch. Additionally, the scores are reset for new hotkeys at the end of each epoch. 
@@ -66,41 +64,38 @@ def __init__(self, config=None): bt.logging.info("load_state()") self.load_state() - self.miner_uids: list[int] = [] self.miner_data_handler = MinerDataHandler() self.price_data_provider = PriceDataProvider() - self.scheduler = sched.scheduler(time.time, time.sleep) - self.simulation_input_list = [ # input data: give me prediction of BTC price for the next 1 day for every 5 min of time SimulationInput( asset="BTC", time_increment=300, - time_length=900, - num_simulations=1000, + time_length=86400, + num_simulations=100, ), SimulationInput( asset="ETH", time_increment=300, time_length=86400, - num_simulations=1000, + num_simulations=100, ), SimulationInput( asset="XAU", time_increment=300, time_length=86400, - num_simulations=1000, + num_simulations=100, ), SimulationInput( asset="SOL", time_increment=300, time_length=86400, - num_simulations=1000, + num_simulations=100, ), ] - self.timeout_extra_seconds = 120 + self.timeout_extra_seconds = 60 self.assert_assets_supported() @@ -109,93 +104,72 @@ def assert_assets_supported(self): for simulation in self.simulation_input_list: assert simulation.asset in PriceDataProvider.TOKEN_MAP - def forward_validator(self): + async def forward_validator(self): """ Validator forward pass. 
Consists of: - Generating the query - Querying the miners - Getting the responses - - Calculating scores - - Updating the scores - Rewarding the miners + - Updating the scores """ bt.logging.info("calling forward_validator()") + return [ + asyncio.create_task(self.forward_prompt()), + asyncio.create_task(self.forward_score()), + ] - current_time = get_current_time() - - # for idx, simulation_input in enumerate(self.simulation_input_list): - # # shift by 30 seconds to not overlap with HFT prompts - # next_prompt_time = round_time_to_minutes( - # current_time, (IN_15_MINUTES * idx) + 30 - # ) - # bt.logging.info( - # f"scheduling regular prompt at {next_prompt_time.isoformat()}" - # ) - # self.scheduler.enter( - # (next_prompt_time - current_time).total_seconds(), - # 1, - # self.forward_prompt, - # (simulation_input,), - # ) - - # initialize miners data - self.update_miners() - - # send simultaneously all assets for HFT - self.schedule_prompt_hft(current_time) - - # bt.logging.info( - # f"scheduling scoring at {(current_time + timedelta(seconds=IN_15_MINUTES)).isoformat()}" - # ) - # self.scheduler.enter( - # IN_15_MINUTES, - # 1, - # self.forward_score, - # ) - self.scheduler.run() - - def schedule_prompt_hft(self, current_time: datetime): - next_prompt_time = round_time_to_minutes(current_time) - bt.logging.info( - f"scheduling HFT prompt at {next_prompt_time.isoformat()}" + async def wait_till_next_simulation( + self, request_time: datetime, simulation_input_list: list + ): + # wait until the next simulation + next_iteration = request_time + timedelta( + minutes=60 / len(simulation_input_list) ) - self.scheduler.enter( - (next_prompt_time - current_time).total_seconds(), - 1, - self.forward_prompt_hft, - ) - - def forward_prompt(self, simulation_input: SimulationInput): - request_time = get_current_time() - next_prompt_time = round_time_to_minutes(request_time, IN_1_HOUR - 60) + wait_time = timeout_until(next_iteration) bt.logging.info( - f"scheduling regular 
prompt at {next_prompt_time.isoformat()}" - ) - - self.scheduler.enter( - # round to the next minute and add 2 of 15 minutes and subtract 60 seconds because round_time_to_minutes rounds to the next minute - (next_prompt_time - request_time).total_seconds(), - 1, - self.forward_prompt, - (simulation_input,), + f"Waiting for {wait_time/60} minutes until the next simulation", + "forward_prompt", ) + await asyncio.sleep(wait_time) + + async def forward_prompt(self): + for simulation_input in self.simulation_input_list: + # ================= Step 1 ================= # + # Getting available miners from metagraph and saving information about them + # and their properties (rank, incentives, emission) at the current moment in the database + # in the metagraph_history table and in the miners table + # ========================================== # - # ================= Step 1 ================= # - # Getting available miners from metagraph and saving information about them - # and their properties (rank, incentives, emission) at the current moment in the database - # in the metagraph_history table and in the miners table - # ========================================== # - - self.update_miners() - - start_time = round_time_to_minutes(request_time, 60) + miner_uids = get_available_miners_and_update_metagraph_history( + base_neuron=self, + miner_data_handler=self.miner_data_handler, + ) - if should_skip_xau(start_time) and simulation_input.asset == "XAU": - bt.logging.info( - "Skipping XAU simulation as market is closed", - "forward_prompt", + if len(miner_uids) == 0: + bt.logging.error( + "No miners available", + "forward_prompt", + ) + await self.wait_till_next_simulation( + get_current_time(), self.simulation_input_list + ) + continue + + request_time = get_current_time() + start_time = round_time_to_minutes( + request_time, 60, self.timeout_extra_seconds ) - return + + if should_skip_xau(start_time) and simulation_input.asset == "XAU": + bt.logging.info( + "Skipping XAU 
simulation as market is closed", + "forward_prompt", + ) + await self.wait_till_next_simulation( + request_time, self.simulation_input_list + ) + continue # ================= Step 2 ================= # # Query all the available miners and save all their responses @@ -205,21 +179,50 @@ def forward_prompt(self, simulation_input: SimulationInput): # add the start time to the simulation input simulation_input.start_time = start_time.isoformat() - query_available_miners_and_save_responses( - base_neuron=self, - miner_data_handler=self.miner_data_handler, - miner_uids=self.miner_uids, - simulation_input=simulation_input, - request_time=request_time, - ) + # TEMP + if request_time >= more_paths_launch_time: + simulation_input.num_simulations = 1000 + # END TEMP + + await query_available_miners_and_save_responses( + base_neuron=self, + miner_data_handler=self.miner_data_handler, + miner_uids=miner_uids, + simulation_input=simulation_input, + request_time=request_time, + ) + + await self.wait_till_next_simulation( + request_time, self.simulation_input_list + ) async def forward_score(self): - # getting current time current_time = get_current_time() + next_iteration = current_time + timedelta(minutes=15) + + async def wait_till_next_iteration(): + # wait until the next iteration + wait_time = timeout_until(next_iteration) + bt.logging.info( + f"Waiting for {wait_time/60} minutes until the next iteration", + "forward_score", + ) + await asyncio.sleep(wait_time) + # round current time to the closest minute and add extra minutes # to be sure we are after the start time of the prompt - scored_time = round_time_to_minutes(current_time) + scored_time = round_time_to_minutes( + current_time, 60, self.timeout_extra_seconds * 2 + ) + + # wait until the score_time + wait_time = timeout_until(scored_time) + bt.logging.info( + f"Waiting for {wait_time/60} minutes to start validating", + "forward_score", + ) + await asyncio.sleep(wait_time) # ================= Step 3 ================= # 
# Calculate rewards based on historical predictions data @@ -238,6 +241,7 @@ async def forward_score(self): ) if not success: + await wait_till_next_iteration() return # ================= Step 4 ================= # @@ -255,6 +259,7 @@ async def forward_score(self): ) if len(moving_averages_data) == 0: + await wait_till_next_iteration() return # ================= Step 5 ================= # @@ -262,6 +267,24 @@ async def forward_score(self): # into bittensor consensus calculation # ========================================== # + moving_averages_data.append( + { + "miner_id": 0, + "miner_uid": ( + 23 if self.config.subtensor.network == "test" else 248 + ), + "smoothed_score": 0, + "reward_weight": sum( + [r["reward_weight"] for r in moving_averages_data] + ), + "updated_at": scored_time.isoformat(), + } + ) + + bt.logging.info( + f"Moving averages data for owner: {moving_averages_data[-1]}" + ) + send_weights_to_bittensor_and_update_weights_history( base_neuron=self, moving_averages_data=moving_averages_data, @@ -269,11 +292,7 @@ async def forward_score(self): scored_time=scored_time, ) - self.scheduler.enter( - IN_15_MINUTES, - 1, - self.forward_score, - ) + await wait_till_next_iteration() async def forward_miner(self, _: bt.Synapse) -> bt.Synapse: pass diff --git a/synth/base/dendrite.py b/synth/base/dendrite.py index f649dded..748c0c91 100644 --- a/synth/base/dendrite.py +++ b/synth/base/dendrite.py @@ -1,7 +1,7 @@ import sys import traceback -import time from typing import List, Optional, Tuple, Type, Union +import time import asyncio import uuid import aiohttp @@ -10,12 +10,11 @@ import bittensor as bt from bittensor_wallet import Keypair, Wallet import httpx +from pydantic import ValidationError import uvloop from synth.protocol import Simulation -from synth.simulation_input import SimulationInput -from synth.utils.helpers import timeout_from_start_time asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) @@ -98,51 +97,65 @@ class SynthDendrite(bt.Dendrite): 
def __init__(self, wallet: Optional[Union[Wallet, Keypair]] = None): super().__init__(wallet=wallet) - async def forward_hft( + async def forward( self, axons: Union[ list[Union[bt.AxonInfo, bt.Axon]], Union[bt.AxonInfo, bt.Axon] ], - simulation_input_list: list[SimulationInput], - ) -> list[tuple[Simulation, int]]: - # Get responses for all axons. - async with httpx.AsyncClient( - http2=True, - limits=httpx.Limits( - max_connections=None, max_keepalive_connections=1000 - ), - ) as client: - tasks = [] - for simulation_idx, simulation_input in enumerate( - simulation_input_list - ): - synapse = Simulation(simulation_input=simulation_input) - for axon_idx, target_axon in enumerate(axons): - # forward the index along with the synapse to identify miner later - tasks.append( - self.call_http2( - client=client, - axon_idx=axon_idx, - simulation_idx=simulation_idx, - target_axon=target_axon, - synapse=synapse.model_copy(), - start_time=simulation_input.start_time, - ) + synapse: Simulation, + timeout: float = 12, + run_async: bool = True, + ) -> list[Simulation]: + is_list = True + # If a single axon is provided, wrap it in a list for uniform processing + if not isinstance(axons, list): + is_list = False + axons = [axons] + + async def query_all_axons(): + async def single_axon_response( + target_axon: Union[bt.AxonInfo, bt.Axon], + ) -> Simulation: + async with httpx.AsyncClient( + http2=True, + limits=httpx.Limits( + max_connections=None, max_keepalive_connections=25 + ), + timeout=timeout, + ) as client: + return await self.call_http2( + client=client, + target_axon=target_axon, + synapse=synapse.model_copy(), # type: ignore + timeout=timeout, ) - return await asyncio.gather(*tasks) + # If run_async flag is False, get responses one by one. + # If run_async flag is True, get responses concurrently using asyncio.gather(). 
+ if not run_async: + return [ + await single_axon_response(target_axon) + for target_axon in axons + ] # type: ignore + + return await asyncio.gather( + *(single_axon_response(target_axon) for target_axon in axons) + ) # type: ignore + + # Get responses for all axons. + responses = await query_all_axons() + # Return the single response if only one axon was targeted, else return all responses + return responses[0] if len(responses) == 1 and not is_list else responses # type: ignore async def call_http2( self, client: httpx.AsyncClient, - axon_idx: int, target_axon: Union[bt.AxonInfo, bt.Axon], synapse: Simulation, - start_time: str, - ) -> tuple[Simulation, int]: - timeout = timeout_from_start_time(None, start_time) - + timeout: float = 12.0, + ) -> Simulation: # Record start time + start_time = time.time() target_axon = ( target_axon.info() if isinstance(target_axon, bt.Axon) @@ -163,7 +176,6 @@ async def call_http2( self._log_outgoing_request(synapse) # Make the HTTP POST request - start_time = time.time() response = await client.post( url=url, headers=synapse.to_headers(), @@ -179,7 +191,9 @@ async def call_http2( self.process_server_response( status, headers, json_response, synapse ) - synapse.dendrite.process_time = str(time.time() - start_time) + + # Set process time and log the response + synapse.dendrite.process_time = str(time.time() - start_time) # type: ignore except Exception as e: synapse = self.process_error_message(synapse, request_name, e) @@ -188,7 +202,7 @@ async def call_http2( self._log_incoming_response(synapse) # Return the updated synapse object after deserializing if requested - return synapse, axon_idx + return synapse def process_server_response( self, status, _, json_response: dict, local_synapse: Simulation @@ -215,10 +229,8 @@ def process_server_response( local_synapse.axon.status_message = json_response.get("message") # Update the status code and status message of the dendrite to match the axon - local_synapse.dendrite.status_code = 
local_synapse.axon.status_code - local_synapse.dendrite.status_message = ( - local_synapse.axon.status_message - ) + local_synapse.dendrite.status_code = local_synapse.axon.status_code # type: ignore + local_synapse.dendrite.status_message = local_synapse.axon.status_message # type: ignore def log_exception(self, exception: Exception): log_exception(exception) @@ -250,6 +262,7 @@ def log_exception(exception: Exception): httpx.ReadTimeout, httpx.ConnectTimeout, httpx.RemoteProtocolError, + ValidationError, ), ): bt.logging.trace(f"{error_type}#{error_id}: {exception}") diff --git a/synth/base/miner.py b/synth/base/miner.py index 57ddbcf0..257c3900 100644 --- a/synth/base/miner.py +++ b/synth/base/miner.py @@ -20,22 +20,14 @@ import threading import argparse import traceback -from typing import Union - import bittensor as bt -from bittensor.core.axon import V_7_2_0 -from bittensor.core.errors import SynapseDendriteNoneException -from bittensor_wallet import Keypair -from bittensor.utils.axon_utils import ( - allowed_nonce_window_ns, - calculate_diff_seconds, -) - from synth.base.neuron import BaseNeuron from synth.utils.config import add_miner_args +from typing import Union + class BaseMinerNeuron(BaseNeuron): """ @@ -73,7 +65,6 @@ def __init__(self, config=None): forward_fn=self.forward_miner, blacklist_fn=self.blacklist, priority_fn=self.priority, - verify_fn=self.verify, ) bt.logging.info(f"Axon created: {self.axon}") @@ -83,132 +74,6 @@ def __init__(self, config=None): self.thread: Union[threading.Thread, None] = None self.lock = asyncio.Lock() - async def verify(self, synapse: bt.Synapse): - """ - This method is used to verify the authenticity of a received message using a digital signature. - - It ensures that the message was not tampered with and was sent by the expected sender. - - The :func:`default_verify` method in the Bittensor framework is a critical security function within the - Axon server. 
It is designed to authenticate incoming messages by verifying their digital - signatures. This verification ensures the integrity of the message and confirms that it was - indeed sent by the claimed sender. The method plays a pivotal role in maintaining the trustworthiness - and reliability of the communication within the Bittensor network. - - Key Features - Security Assurance - The default_verify method is crucial for ensuring the security of the Bittensor network. By verifying - digital signatures, it guards against unauthorized access and data manipulation. - - Preventing Replay Attacks - The method checks for increasing nonce values, which is a vital - step in preventing replay attacks. A replay attack involves an adversary reusing or - delaying the transmission of a valid data transmission to deceive the receiver. - The first time a nonce is seen, it is checked for freshness by ensuring it is - within an acceptable delta time range. - - Authenticity and Integrity Checks - By verifying that the message's digital signature matches - its content, the method ensures the message's authenticity (it comes from the claimed - sender) and integrity (it hasn't been altered during transmission). - - Trust in Communication - This method fosters trust in the network communication. Neurons - (nodes in the Bittensor network) can confidently interact, knowing that the messages they - receive are genuine and have not been tampered with. - - Cryptographic Techniques - The method's reliance on asymmetric encryption techniques is a - cornerstone of modern cryptographic security, ensuring that only entities with the correct - cryptographic keys can participate in secure communication. - - Args: - synapse(bittensor.core.synapse.Synapse): bittensor request synapse. - - Raises: - Exception: If the ``receiver_hotkey`` doesn't match with ``self.receiver_hotkey``. - Exception: If the nonce is not larger than the previous nonce for the same endpoint key. 
- Exception: If the signature verification fails. - - After successful verification, the nonce for the given endpoint key is updated. - - Note: - The verification process assumes the use of an asymmetric encryption algorithm, - where the sender signs the message with their private key and the receiver verifies the - signature using the sender's public key. - """ - # Build the keypair from the dendrite_hotkey - if synapse.dendrite is not None: - keypair = Keypair(ss58_address=synapse.dendrite.hotkey) - - # Build the signature messages. - message = f"{synapse.dendrite.nonce}.{synapse.dendrite.hotkey}.{self.wallet.hotkey.ss58_address}.{synapse.dendrite.uuid}.{synapse.computed_body_hash}" - - # Build the unique endpoint key. - endpoint_key = f"{synapse.dendrite.hotkey}:{synapse.dendrite.uuid}" - - # Requests must have nonces to be safe from replays - if synapse.dendrite.nonce is None: - raise Exception("Missing Nonce") - - # Newer nonce structure post v7.2 - if ( - synapse.dendrite.version is not None - and synapse.dendrite.version >= V_7_2_0 - ): - # If we don't have a nonce stored, ensure that the nonce falls within - # a reasonable delta. 
- current_time_ns = time.time_ns() - allowed_window_ns = allowed_nonce_window_ns( - current_time_ns, synapse.timeout - ) - - if ( - self.nonces.get(endpoint_key) is None - and synapse.dendrite.nonce <= allowed_window_ns - ): - diff_seconds, allowed_delta_seconds = ( - calculate_diff_seconds( - current_time_ns, - synapse.timeout, - synapse.dendrite.nonce, - ) - ) - raise Exception( - f"Nonce is too old: acceptable delta is {allowed_delta_seconds:.2f} seconds but request was {diff_seconds:.2f} seconds old" - ) - - # If a nonce is stored, ensure the new nonce - # is greater or equal to than the previous nonce - if ( - self.nonces.get(endpoint_key) is not None - and synapse.dendrite.nonce < self.nonces[endpoint_key] - ): - raise Exception( - "Nonce is too old, a newer one was last processed" - ) - # Older nonce structure pre v7.2 - else: - if ( - self.nonces.get(endpoint_key) is not None - and synapse.dendrite.nonce < self.nonces[endpoint_key] - ): - raise Exception( - "Nonce is too old, a newer one was last processed" - ) - - if synapse.dendrite.signature and not keypair.verify( - message, synapse.dendrite.signature - ): - raise Exception( - f"Signature mismatch with {message} and {synapse.dendrite.signature}" - ) - - # Success - self.nonces[endpoint_key] = synapse.dendrite.nonce # type: ignore - else: - raise SynapseDendriteNoneException(synapse=synapse) - def run(self): """ Initiates and manages the main loop for the miner on the Bittensor network. The main loop handles graceful shutdown on keyboard interrupts and logs unforeseen errors. diff --git a/synth/base/neuron.py b/synth/base/neuron.py index 57e2c234..8364fc52 100644 --- a/synth/base/neuron.py +++ b/synth/base/neuron.py @@ -102,7 +102,7 @@ def __init__(self, config=None): async def forward_miner(self, synapse: bt.Synapse) -> bt.Synapse: ... @abstractmethod - def forward_validator(self): ... + async def forward_validator(self): ... @abstractmethod def resync_metagraph(self): ... 
diff --git a/synth/base/validator.py b/synth/base/validator.py index c719f5c1..5c6cd09e 100644 --- a/synth/base/validator.py +++ b/synth/base/validator.py @@ -21,13 +21,13 @@ import sys import copy import numpy as np +import asyncio import argparse -from typing import List -import traceback - - +import threading import bittensor as bt +from typing import List, Union +import traceback from synth.base.dendrite import SynthDendrite from synth.base.neuron import BaseNeuron @@ -73,6 +73,14 @@ def __init__(self, config=None): else: bt.logging.warning("axon off, not serving ip to chain.") + # Create asyncio event loop to manage async tasks. + self.loop = asyncio.get_event_loop() + + # Instantiate runners + self.should_exit = False + self.is_running = False + self.thread: Union[threading.Thread, None] = None + def serve_axon(self): """Serve axon to enable external connections.""" @@ -96,6 +104,10 @@ def serve_axon(self): f"Failed to create Axon initialize with exception: {e}" ) + async def concurrent_forward(self): + coroutines = await self.forward_validator() + await asyncio.gather(*coroutines) + def run(self): """ Initiates and manages the main loop for the miner on the Bittensor network. The main loop handles graceful shutdown on keyboard interrupts and logs unforeseen errors. @@ -119,8 +131,19 @@ def run(self): # This loop maintains the validator's operations until intentionally stopped. try: - # Run forwards. - self.forward_validator() + while True: + # Run multiple forwards concurrently. + self.loop.run_until_complete(self.concurrent_forward()) + + # Check if we should exit. + if self.should_exit: + break + + # Sync metagraph and potentially set weights. + self.sync() + + self.step += 1 + # If someone intentionally stops the validator, it'll safely terminate operations. 
except KeyboardInterrupt: if not self.config.neuron.axon_off: @@ -133,6 +156,50 @@ def run(self): bt.logging.error(f"Error during validation: {str(err)}") traceback.print_exc(file=sys.stderr) + def run_in_background_thread(self): + """ + Starts the validator's operations in a background thread upon entering the context. + This method facilitates the use of the validator in a 'with' statement. + """ + if not self.is_running: + bt.logging.debug("Starting validator in background thread.") + self.should_exit = False + self.thread = threading.Thread(target=self.run, daemon=True) + self.thread.start() + self.is_running = True + bt.logging.debug("Started") + + def stop_run_thread(self): + """ + Stops the validator's operations that are running in the background thread. + """ + if self.is_running: + bt.logging.debug("Stopping validator in background thread.") + self.should_exit = True + if self.thread is not None: + self.thread.join(5) + self.is_running = False + bt.logging.debug("Stopped") + + def __enter__(self): + self.run_in_background_thread() + return self + + def __exit__(self, exc_type, exc_value, traceback): + """ + Stops the validator's background operations upon exiting the context. + This method facilitates the use of the validator in a 'with' statement. + + Args: + exc_type: The type of the exception that caused the context to be exited. + None if the context was exited without an exception. + exc_value: The instance of the exception that caused the context to be exited. + None if the context was exited without an exception. + traceback: A traceback object encoding the stack trace. + None if the context was exited without an exception. + """ + self.stop_run_thread() + def set_weights(self): """ Sets the validator weights to the metagraph hotkeys based on the scores it has received from the miners. The weights determine the trust and incentive level the validator assigns to miner nodes on the network. 
diff --git a/synth/validator/forward.py b/synth/validator/forward.py index 8513706e..d76e7d29 100644 --- a/synth/validator/forward.py +++ b/synth/validator/forward.py @@ -22,8 +22,6 @@ import typing import sys import traceback -import asyncio - import bittensor as bt import numpy as np @@ -200,20 +198,13 @@ def calculate_rewards_and_update_scores( return fail_count != len(validator_requests) -def query_available_miners_and_save_responses( +async def query_available_miners_and_save_responses( base_neuron: BaseValidatorNeuron, miner_data_handler: MinerDataHandler, miner_uids: list, simulation_input: SimulationInput, request_time: datetime, ): - if len(miner_uids) == 0: - bt.logging.error( - "No miners available", - "query_available_miners_and_save_responses", - ) - return - timeout = timeout_from_start_time( base_neuron.config.neuron.timeout, simulation_input.start_time ) @@ -277,65 +268,6 @@ def query_available_miners_and_save_responses( bt.logging.info("skip saving because no prediction") -def query_available_miners_and_save_responses_hft( - base_neuron: BaseValidatorNeuron, - miner_data_handler: MinerDataHandler, - miner_uids: list, - simulation_input_list: list[SimulationInput], - request_time: datetime, -): - if len(miner_uids) == 0: - bt.logging.error( - "No miners available", - "query_available_miners_and_save_responses_hft", - ) - return - - axons = [base_neuron.metagraph.axons[uid] for uid in miner_uids] - - start_time = time.time() - - synapses = asyncio.run( - base_neuron.dendrite.forward_hft( - axons=axons, - simulation_input_list=simulation_input_list, - ) - ) - - total_process_time = str(time.time() - start_time) - bt.logging.debug( - f"Forwarding took {total_process_time} seconds", "forward_hft" - ) - - miner_predictions = {} - for simulation_input in simulation_input_list: - miner_predictions[simulation_input.asset] = {} - - for synapse, idx in synapses: - response = synapse.deserialize() - process_time = synapse.dendrite.process_time - 
format_validation = validate_responses( - response, simulation_input_list[0], request_time, process_time - ) - # use index of synapse to get miner id - miner_id = miner_uids[idx] - miner_predictions[synapse.simulation_input.asset][miner_id] = ( - response, - format_validation, - process_time, - ) - - for simulation_input in simulation_input_list: - if len(miner_predictions[simulation_input.asset]) > 0: - miner_data_handler.save_responses( - miner_predictions[simulation_input.asset], - simulation_input, - request_time, - ) - else: - bt.logging.info("skip saving because no prediction") - - def get_available_miners_and_update_metagraph_history( base_neuron: BaseValidatorNeuron, miner_data_handler: MinerDataHandler, From b32098ef9a11410f7be34d7531f972a4af673e57 Mon Sep 17 00:00:00 2001 From: Thykof Date: Thu, 13 Nov 2025 18:17:15 +0100 Subject: [PATCH 09/24] fix unit test with numba --- tests/test_calculate_crps.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_calculate_crps.py b/tests/test_calculate_crps.py index ce28f379..d05fb065 100644 --- a/tests/test_calculate_crps.py +++ b/tests/test_calculate_crps.py @@ -121,7 +121,7 @@ def test_calculate_crps_for_miner_4(self): time_increment, ) - self.assertEqual(sum_all_scores, 13413.59914105867) + self.assertEqual(sum_all_scores, 13413.599141058676) def test_calculate_crps_for_miner_5(self): """ From 027d7a9b57843e7c2ae55caab03e1ce1daaf088e Mon Sep 17 00:00:00 2001 From: Thykof Date: Thu, 13 Nov 2025 18:17:31 +0100 Subject: [PATCH 10/24] clean temp code of step1 --- neurons/validator.py | 14 +-- synth/utils/helpers.py | 3 - synth/validator/forward.py | 37 +------- synth/validator/response_validation_v1.py | 103 ---------------------- 4 files changed, 5 insertions(+), 152 deletions(-) delete mode 100644 synth/validator/response_validation_v1.py diff --git a/neurons/validator.py b/neurons/validator.py index 8eee8806..6a884947 100644 --- a/neurons/validator.py +++ b/neurons/validator.py @@ -30,7 
+30,6 @@ get_current_time, round_time_to_minutes, timeout_until, - more_paths_launch_time, ) from synth.utils.logging import setup_gcp_logging from synth.utils.opening_hours import should_skip_xau @@ -74,25 +73,25 @@ def __init__(self, config=None): asset="BTC", time_increment=300, time_length=86400, - num_simulations=100, + num_simulations=1000, ), SimulationInput( asset="ETH", time_increment=300, time_length=86400, - num_simulations=100, + num_simulations=1000, ), SimulationInput( asset="XAU", time_increment=300, time_length=86400, - num_simulations=100, + num_simulations=1000, ), SimulationInput( asset="SOL", time_increment=300, time_length=86400, - num_simulations=100, + num_simulations=1000, ), ] self.timeout_extra_seconds = 60 @@ -179,11 +178,6 @@ async def forward_prompt(self): # add the start time to the simulation input simulation_input.start_time = start_time.isoformat() - # TEMP - if request_time >= more_paths_launch_time: - simulation_input.num_simulations = 1000 - # END TEMP - await query_available_miners_and_save_responses( base_neuron=self, miner_data_handler=self.miner_data_handler, diff --git a/synth/utils/helpers.py b/synth/utils/helpers.py index 6aa8f92e..684a29c3 100644 --- a/synth/utils/helpers.py +++ b/synth/utils/helpers.py @@ -5,9 +5,6 @@ import numpy as np -more_paths_launch_time = datetime(2025, 11, 12, 14, 0, 0, 0, timezone.utc) - - def get_current_time() -> datetime: # Get current date and time return datetime.now(timezone.utc).replace(microsecond=0) diff --git a/synth/validator/forward.py b/synth/validator/forward.py index d76e7d29..5764d94c 100644 --- a/synth/validator/forward.py +++ b/synth/validator/forward.py @@ -19,7 +19,6 @@ from datetime import datetime, timedelta import random import time -import typing import sys import traceback @@ -35,7 +34,6 @@ get_current_time, timeout_from_start_time, convert_list_elements_to_str, - more_paths_launch_time, ) from synth.utils.uids import check_uid_availability from 
synth.validator.miner_data_handler import MinerDataHandler @@ -45,45 +43,12 @@ print_rewards_df, ) from synth.validator.price_data_provider import PriceDataProvider -from synth.validator.response_validation_v1 import ( - validate_responses as validate_responses_v1, -) from synth.validator.response_validation_v2 import ( validate_responses as validate_responses_v2, ) from synth.validator.reward import get_rewards, print_scores_df -# TEMP -def validate_responses( - response, - simulation_input: SimulationInput, - request_time: datetime, - process_time_str: typing.Optional[str], -) -> str: - if not isinstance(response, (list, tuple)): - return "Not a list nor tuple: " + str(type(response)) - - if len(response) == 0: - return "Empty list" - - first_element = response[0] - if isinstance(first_element, list): - if request_time >= more_paths_launch_time: - return "detected new format" - - return validate_responses_v1( - response, simulation_input, request_time, process_time_str - ) - else: - return validate_responses_v2( - response, simulation_input, request_time, process_time_str - ) - - -# END TEMP - - def send_weights_to_bittensor_and_update_weights_history( base_neuron: BaseValidatorNeuron, moving_averages_data: list[dict], @@ -247,7 +212,7 @@ async def query_available_miners_and_save_responses( response = synapse.deserialize() process_time = synapse.dendrite.process_time try: - format_validation = validate_responses( + format_validation = validate_responses_v2( response, simulation_input, request_time, process_time ) except Exception: diff --git a/synth/validator/response_validation_v1.py b/synth/validator/response_validation_v1.py deleted file mode 100644 index 95a35c8f..00000000 --- a/synth/validator/response_validation_v1.py +++ /dev/null @@ -1,103 +0,0 @@ -from datetime import datetime, timedelta -import typing - - -from synth.simulation_input import SimulationInput - -CORRECT = "CORRECT" - - -def datetime_valid(dt_str) -> bool: - try: - 
datetime.fromisoformat(dt_str) - except ValueError: - return False - return True - - -def validate_datetime( - dt_str, -) -> typing.Tuple[datetime, typing.Optional[str]]: - if not isinstance(dt_str, str): - return ( - datetime.now(), - f"Time format is incorrect: expected str, got {type(dt_str)}", - ) - if not datetime_valid(dt_str): - return ( - datetime.now(), - f"Time format is incorrect: expected isoformat, got {dt_str}", - ) - - return datetime.fromisoformat(dt_str), None - - -def validate_responses( # noqa: C901 - response, - simulation_input: SimulationInput, - request_time: datetime, - process_time_str: typing.Optional[str], -) -> str: - """ - Validate responses from miners. - - Return a string with the error message - if the response is not following the expected format or the response is empty, - otherwise, return "CORRECT". - """ - # check the process time - if process_time_str is None: - return "time out or internal server error (process time is None)" - - received_at = request_time + timedelta(seconds=float(process_time_str)) - start_time = datetime.fromisoformat(simulation_input.start_time) - if received_at > start_time: - return f"Response received after the simulation start time: expected {start_time}, got {received_at}" - - # check if the response is empty - if response is None or len(response) == 0: - return "Response is empty" - - # check the number of paths - if len(response) != simulation_input.num_simulations: - return f"Number of paths is incorrect: expected {simulation_input.num_simulations}, got {len(response)}" - - for path in response: - # check the number of time points - expected_time_points = ( - simulation_input.time_length // simulation_input.time_increment + 1 - ) - if len(path) != expected_time_points: - return f"Number of time points is incorrect: expected {expected_time_points}, got {len(path)}" - - # check the start time - first_time = path[0].get("time", "") - if first_time != simulation_input.start_time: - return f"Start time 
is incorrect: expected {simulation_input.start_time}, got {first_time}" - - for i in range(1, len(path)): - # check the time formats - i_minus_one_str_time = path[i - 1].get("time", "") - i_minus_one_datetime, error_message = validate_datetime( - i_minus_one_str_time - ) - if error_message: - return error_message - - i_str_time = path[i].get("time", "") - i_datetime, error_message = validate_datetime(i_str_time) - if error_message: - return error_message - - # check the time increment - expected_delta = timedelta(seconds=simulation_input.time_increment) - actual_delta = i_datetime - i_minus_one_datetime - if actual_delta != expected_delta: - return f"Time increment is incorrect: expected {expected_delta}, got {actual_delta}" - - # check the price format - price = path[i].get("price") - if not isinstance(price, (int, float)): - return f"Price format is incorrect: expected int or float, got {type(price)}" - - return CORRECT From 78228ded6b268c4e9035b9387553f707e9ab2740 Mon Sep 17 00:00:00 2001 From: Thykof Date: Thu, 13 Nov 2025 18:28:28 +0100 Subject: [PATCH 11/24] install numba --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 1555216d..5ef8b553 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,3 +20,4 @@ google-cloud-logging==3.12.1 uvloop==0.21.0 httpx==0.28.1 httpx[http2] +numba==0.62.1 From 76524b581865d131b132465779c63a65db983924 Mon Sep 17 00:00:00 2001 From: Thykof Date: Fri, 21 Nov 2025 16:06:57 +0100 Subject: [PATCH 12/24] refactor forward with scheduler --- neurons/miner.py | 2 +- neurons/validator.py | 190 ++++++++++++-------------- synth/base/neuron.py | 2 +- synth/base/validator.py | 64 +-------- synth/miner/run.py | 2 +- synth/utils/helpers.py | 16 --- synth/validator/forward.py | 2 +- synth/validator/miner_data_handler.py | 23 ++++ tests/test_helpers.py | 25 +--- 9 files changed, 116 insertions(+), 210 deletions(-) diff --git a/neurons/miner.py b/neurons/miner.py index 
ab85ece5..616a71df 100644 --- a/neurons/miner.py +++ b/neurons/miner.py @@ -173,7 +173,7 @@ def load_state(self): def set_weights(self): pass - async def forward_validator(self): + def forward_validator(self): pass def print_info(self): diff --git a/neurons/validator.py b/neurons/validator.py index 10096014..c2709f67 100644 --- a/neurons/validator.py +++ b/neurons/validator.py @@ -1,9 +1,10 @@ # The MIT License (MIT) # Copyright © 2023 Yuma Rao # Copyright © 2023 Mode Labs -import asyncio from datetime import datetime, timedelta import multiprocessing as mp +import sched +import time # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the “Software”), to deal in the Software without restriction, including without limitation @@ -29,7 +30,6 @@ from synth.utils.helpers import ( get_current_time, round_time_to_minutes, - timeout_until, ) from synth.utils.logging import setup_gcp_logging from synth.utils.opening_hours import should_skip_xau @@ -46,6 +46,9 @@ load_dotenv() +LOW_FREQUENCY_TIME_LENGTH = 86400 +HIGH_FREQUENCY_TIME_LENGTH = 3600 + class Validator(BaseValidatorNeuron): """ @@ -67,43 +70,20 @@ def __init__(self, config=None): self.miner_data_handler = MinerDataHandler() self.price_data_provider = PriceDataProvider() - self.simulation_input_list = [ - # input data: give me prediction of BTC price for the next 1 day for every 5 min of time - SimulationInput( - asset="BTC", - time_increment=300, - time_length=86400, - num_simulations=1000, - ), - SimulationInput( - asset="ETH", - time_increment=300, - time_length=86400, - num_simulations=1000, - ), - SimulationInput( - asset="XAU", - time_increment=300, - time_length=86400, - num_simulations=1000, - ), - SimulationInput( - asset="SOL", - time_increment=300, - time_length=86400, - num_simulations=1000, - ), - ] + self.scheduler = sched.scheduler(time.time, time.sleep) + + self.asset_list = ["BTC", "ETH", "XAU", "SOL"] + 
self.timeout_extra_seconds = 60 self.assert_assets_supported() def assert_assets_supported(self): # Assert assets are all implemented in the price data provider: - for simulation in self.simulation_input_list: - assert simulation.asset in PriceDataProvider.TOKEN_MAP + for asset in self.asset_list: + assert asset in PriceDataProvider.TOKEN_MAP - async def forward_validator(self): + def forward_validator(self): """ Validator forward pass. Consists of: - Generating the query @@ -112,93 +92,95 @@ async def forward_validator(self): - Rewarding the miners - Updating the scores """ - bt.logging.info("calling forward_validator()") - return [ - asyncio.create_task(self.forward_prompt()), - ] + self.schedule_low_frequency(get_current_time(), True) + self.scheduler.run() - async def wait_till_next_simulation( - self, request_time: datetime, simulation_input_list: list + def schedule_low_frequency( + self, cycle_start_time: datetime, immediately: bool = False ): - # wait until the next simulation - next_iteration = request_time + timedelta( - minutes=60 / len(simulation_input_list) + asset = self.asset_list[0] + + latest_asset = self.miner_data_handler.get_latest_asset( + LOW_FREQUENCY_TIME_LENGTH ) - wait_time = timeout_until(next_iteration) + if latest_asset is not None and latest_asset in self.asset_list: + latest_index = self.asset_list.index(latest_asset) + asset = self.asset_list[(latest_index + 1) % len(self.asset_list)] + + delay = 0 + if not immediately: + next_cycle = cycle_start_time + timedelta( + minutes=60 / len(self.asset_list) + ) + delay = (next_cycle - get_current_time()).total_seconds() + bt.logging.info( - f"Waiting for {wait_time/60} minutes until the next simulation", - "forward_prompt", + f"Scheduling next low frequency cycle for asset {asset} in {delay} seconds" + ) + self.scheduler.enter( + delay=delay, + priority=1, + action=self.cycle_flow_frequency, + argument=(asset,), ) - await asyncio.sleep(wait_time) - - async def forward_prompt(self): - for 
simulation_input in self.simulation_input_list: - # ================= Step 1 ================= # - # Getting available miners from metagraph and saving information about them - # and their properties (rank, incentives, emission) at the current moment in the database - # in the metagraph_history table and in the miners table - # ========================================== # - - miner_uids = get_available_miners_and_update_metagraph_history( - base_neuron=self, - miner_data_handler=self.miner_data_handler, - ) - if len(miner_uids) == 0: - bt.logging.error( - "No miners available", - "forward_prompt", - ) - await self.forward_score() - await self.wait_till_next_simulation( - get_current_time(), self.simulation_input_list - ) - continue - - request_time = get_current_time() - start_time = round_time_to_minutes( - request_time, 60, self.timeout_extra_seconds - ) + def cycle_flow_frequency(self, asset: str): + cycle_start_time = get_current_time() + + self.forward_prompt_low_frequency(asset) + self.forward_score_low_frequency() + # self.cleanup_history() + self.schedule_low_frequency(cycle_start_time) + + def forward_prompt_low_frequency(self, asset: str): + self.sync() + miner_uids = get_available_miners_and_update_metagraph_history( + base_neuron=self, + miner_data_handler=self.miner_data_handler, + ) - if should_skip_xau(start_time) and simulation_input.asset == "XAU": - bt.logging.info( - "Skipping XAU simulation as market is closed", - "forward_prompt", - ) - await self.forward_score() - await self.wait_till_next_simulation( - request_time, self.simulation_input_list - ) - continue - - # ================= Step 2 ================= # - # Query all the available miners and save all their responses - # in the database in miner_predictions table - # ========================================== # - - # add the start time to the simulation input - simulation_input.start_time = start_time.isoformat() - - await query_available_miners_and_save_responses( - base_neuron=self, - 
miner_data_handler=self.miner_data_handler, - miner_uids=miner_uids, - simulation_input=simulation_input, - request_time=request_time, + if len(miner_uids) == 0: + bt.logging.error( + "No miners available", + "forward_prompt", ) + return - await self.forward_score() - await self.wait_till_next_simulation( - request_time, self.simulation_input_list + request_time = get_current_time() + start_time: datetime = round_time_to_minutes( + request_time, self.timeout_extra_seconds + ) + + if should_skip_xau(start_time) and asset == "XAU": + bt.logging.info( + "Skipping XAU simulation as market is closed", + "forward_prompt", ) + return + + simulation_input = SimulationInput( + asset=asset, + start_time=start_time.isoformat(), + time_increment=300, + time_length=LOW_FREQUENCY_TIME_LENGTH, + num_simulations=1000, + ) + + query_available_miners_and_save_responses( + base_neuron=self, + miner_data_handler=self.miner_data_handler, + miner_uids=miner_uids, + simulation_input=simulation_input, + request_time=request_time, + ) - async def forward_score(self): + def forward_score_low_frequency(self): current_time = get_current_time() # round current time to the closest minute and add extra minutes # to be sure we are after the start time of the prompt - scored_time = round_time_to_minutes( - current_time, 60, self.timeout_extra_seconds * 2 + scored_time: datetime = round_time_to_minutes( + current_time, self.timeout_extra_seconds * 2 ) # ================= Step 3 ================= # diff --git a/synth/base/neuron.py b/synth/base/neuron.py index 8364fc52..57e2c234 100644 --- a/synth/base/neuron.py +++ b/synth/base/neuron.py @@ -102,7 +102,7 @@ def __init__(self, config=None): async def forward_miner(self, synapse: bt.Synapse) -> bt.Synapse: ... @abstractmethod - async def forward_validator(self): ... + def forward_validator(self): ... @abstractmethod def resync_metagraph(self): ... 
diff --git a/synth/base/validator.py b/synth/base/validator.py index 5c6cd09e..1544db1c 100644 --- a/synth/base/validator.py +++ b/synth/base/validator.py @@ -104,10 +104,6 @@ def serve_axon(self): f"Failed to create Axon initialize with exception: {e}" ) - async def concurrent_forward(self): - coroutines = await self.forward_validator() - await asyncio.gather(*coroutines) - def run(self): """ Initiates and manages the main loop for the miner on the Bittensor network. The main loop handles graceful shutdown on keyboard interrupts and logs unforeseen errors. @@ -129,26 +125,14 @@ def run(self): """ bt.logging.info(f"Validator starting at block: {self.block}") - # This loop maintains the validator's operations until intentionally stopped. try: - while True: - # Run multiple forwards concurrently. - self.loop.run_until_complete(self.concurrent_forward()) - - # Check if we should exit. - if self.should_exit: - break - - # Sync metagraph and potentially set weights. - self.sync() - - self.step += 1 - + self.forward_validator() # If someone intentionally stops the validator, it'll safely terminate operations. except KeyboardInterrupt: if not self.config.neuron.axon_off: self.axon.stop() bt.logging.success("Validator killed by keyboard interrupt.") + traceback.print_exc(file=sys.stderr) exit() # In case of unforeseen errors, the validator will log the error and continue operations. @@ -156,50 +140,6 @@ def run(self): bt.logging.error(f"Error during validation: {str(err)}") traceback.print_exc(file=sys.stderr) - def run_in_background_thread(self): - """ - Starts the validator's operations in a background thread upon entering the context. - This method facilitates the use of the validator in a 'with' statement. 
- """ - if not self.is_running: - bt.logging.debug("Starting validator in background thread.") - self.should_exit = False - self.thread = threading.Thread(target=self.run, daemon=True) - self.thread.start() - self.is_running = True - bt.logging.debug("Started") - - def stop_run_thread(self): - """ - Stops the validator's operations that are running in the background thread. - """ - if self.is_running: - bt.logging.debug("Stopping validator in background thread.") - self.should_exit = True - if self.thread is not None: - self.thread.join(5) - self.is_running = False - bt.logging.debug("Stopped") - - def __enter__(self): - self.run_in_background_thread() - return self - - def __exit__(self, exc_type, exc_value, traceback): - """ - Stops the validator's background operations upon exiting the context. - This method facilitates the use of the validator in a 'with' statement. - - Args: - exc_type: The type of the exception that caused the context to be exited. - None if the context was exited without an exception. - exc_value: The instance of the exception that caused the context to be exited. - None if the context was exited without an exception. - traceback: A traceback object encoding the stack trace. - None if the context was exited without an exception. - """ - self.stop_run_thread() - def set_weights(self): """ Sets the validator weights to the metagraph hotkeys based on the scores it has received from the miners. The weights determine the trust and incentive level the validator assigns to miner nodes on the network. 
diff --git a/synth/miner/run.py b/synth/miner/run.py index ee5eff4f..5eb8bcb1 100644 --- a/synth/miner/run.py +++ b/synth/miner/run.py @@ -17,7 +17,7 @@ ) current_time = get_current_time() - start_time = round_time_to_minutes(current_time, 60, 120) + start_time = round_time_to_minutes(current_time, 120) simulation_input.start_time = start_time.isoformat() print("start_time", simulation_input.start_time) diff --git a/synth/utils/helpers.py b/synth/utils/helpers.py index 9975e76e..d5dd647e 100644 --- a/synth/utils/helpers.py +++ b/synth/utils/helpers.py @@ -140,21 +140,5 @@ def timeout_from_start_time( return (start_time - current_time).total_seconds() -def timeout_until(until_time: datetime): - """ - Calculate the timeout duration from the current time to the until_time. - - :param until_time: datetime object representing the end time. - :return: Timeout duration in seconds. - """ - # Get current date and time - current_time = datetime.now(timezone.utc) - - # Calculate the timeout duration - wait_time = (until_time - current_time).total_seconds() - - return wait_time if wait_time > 0 else 0 - - def convert_list_elements_to_str(items: list[int]) -> list[str]: return [str(x) for x in items] diff --git a/synth/validator/forward.py b/synth/validator/forward.py index 5764d94c..a48ef0ef 100644 --- a/synth/validator/forward.py +++ b/synth/validator/forward.py @@ -163,7 +163,7 @@ def calculate_rewards_and_update_scores( return fail_count != len(validator_requests) -async def query_available_miners_and_save_responses( +def query_available_miners_and_save_responses( base_neuron: BaseValidatorNeuron, miner_data_handler: MinerDataHandler, miner_uids: list, diff --git a/synth/validator/miner_data_handler.py b/synth/validator/miner_data_handler.py index 813f21b6..50e2244a 100644 --- a/synth/validator/miner_data_handler.py +++ b/synth/validator/miner_data_handler.py @@ -84,6 +84,29 @@ def get_miner_ids_map(self, connection: Connection): return miner_Uid_map + def 
get_latest_asset(self, time_length: int) -> str | None: + try: + with self.engine.connect() as connection: + query = ( + select( + ValidatorRequest.asset, + ) + .where(ValidatorRequest.time_length == time_length) + .limit(1) + .order_by(ValidatorRequest.start_time.desc()) + ) + + result = connection.execute(query).fetchall() + if len(result) == 0: + return None + + # Return the asset with the least count + return result[0].asset + except Exception as e: + bt.logging.error(f"in get_next_asset (got an exception): {e}") + traceback.print_exc(file=sys.stderr) + return None + @retry( stop=stop_after_attempt(5), wait=wait_random_exponential(multiplier=7), diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 6f2ccc39..d6dc823f 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -1,5 +1,5 @@ import unittest -from datetime import datetime, timedelta, timezone +from datetime import datetime from synth.utils.helpers import ( @@ -9,7 +9,6 @@ from_iso_to_unix_time, get_current_time, round_to_8_significant_digits, - timeout_until, ) @@ -140,25 +139,3 @@ def test_from_iso_to_unix_time(self): self.assertEqual( from_iso_to_unix_time("2025-08-05T14:56:00+00:00"), 1754405760 ) - - def test_timeout_until(self): - # Arrange: Set a future time 10 seconds from now - future_time = datetime.now(timezone.utc) + timedelta(seconds=10) - - # Act: Call the timeout_until function - timeout = timeout_until(future_time) - - # Assert: The timeout should be approximately 10 seconds - assert ( - 9 <= timeout <= 10 - ), f"Expected timeout to be around 10 seconds, got {timeout}" - - def test_timeout_until_past_time(self): - # Arrange: Set a past time 10 seconds ago - past_time = datetime.now(timezone.utc) - timedelta(seconds=10) - - # Act: Call the timeout_until function - timeout = timeout_until(past_time) - - # Assert: The timeout should be negative - assert timeout == 0, f"Expected timeout to be 0, got {timeout}" From 5b8b018b48d129396ad671847e70a545b04187e3 Mon Sep 17 
00:00:00 2001 From: Thykof Date: Fri, 21 Nov 2025 16:45:24 +0100 Subject: [PATCH 13/24] high frequency --- neurons/validator.py | 110 ++++++++++++++++++-------- synth/validator/forward.py | 4 +- synth/validator/miner_data_handler.py | 2 +- tests/test_miner_data_handler.py | 28 +++++-- tests/test_rewards.py | 4 +- 5 files changed, 106 insertions(+), 42 deletions(-) diff --git a/neurons/validator.py b/neurons/validator.py index c2709f67..c91ba7ed 100644 --- a/neurons/validator.py +++ b/neurons/validator.py @@ -5,6 +5,7 @@ import multiprocessing as mp import sched import time +from dataclasses import dataclass # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the “Software”), to deal in the Software without restriction, including without limitation @@ -46,8 +47,34 @@ load_dotenv() -LOW_FREQUENCY_TIME_LENGTH = 86400 -HIGH_FREQUENCY_TIME_LENGTH = 3600 + +@dataclass +class PromptConfig: + label: str + time_length: int + time_increment: int + initial_delay: int + total_cycle_minutes: int + timeout_extra_seconds: int + + +LOW_FREQUENCY = PromptConfig( + label="low", + time_length=86400, + time_increment=300, + initial_delay=60, # avoid 2 prompts to start simultaneously + total_cycle_minutes=60, + timeout_extra_seconds=60, +) + +HIGH_FREQUENCY = PromptConfig( + label="high", + time_length=3600, + time_increment=60, + initial_delay=0, + total_cycle_minutes=12, + timeout_extra_seconds=60, +) class Validator(BaseValidatorNeuron): @@ -71,11 +98,9 @@ def __init__(self, config=None): self.price_data_provider = PriceDataProvider() self.scheduler = sched.scheduler(time.time, time.sleep) - + self.miner_uids: list[int] = [] self.asset_list = ["BTC", "ETH", "XAU", "SOL"] - self.timeout_extra_seconds = 60 - self.assert_assets_supported() def assert_assets_supported(self): @@ -92,54 +117,82 @@ def forward_validator(self): - Rewarding the miners - Updating the scores """ - 
self.schedule_low_frequency(get_current_time(), True) + self.miner_uids = get_available_miners_and_update_metagraph_history( + base_neuron=self, + miner_data_handler=self.miner_data_handler, + ) + self.schedule_cycle(get_current_time(), HIGH_FREQUENCY, True) + self.schedule_cycle(get_current_time(), LOW_FREQUENCY, True) self.scheduler.run() - def schedule_low_frequency( - self, cycle_start_time: datetime, immediately: bool = False + def schedule_cycle( + self, + cycle_start_time: datetime, + prompt_config: PromptConfig, + immediately: bool = False, ): asset = self.asset_list[0] latest_asset = self.miner_data_handler.get_latest_asset( - LOW_FREQUENCY_TIME_LENGTH + prompt_config.time_length ) if latest_asset is not None and latest_asset in self.asset_list: latest_index = self.asset_list.index(latest_asset) asset = self.asset_list[(latest_index + 1) % len(self.asset_list)] - delay = 0 + delay = prompt_config.initial_delay if not immediately: next_cycle = cycle_start_time + timedelta( - minutes=60 / len(self.asset_list) + minutes=prompt_config.total_cycle_minutes + / len(self.asset_list) ) delay = (next_cycle - get_current_time()).total_seconds() bt.logging.info( - f"Scheduling next low frequency cycle for asset {asset} in {delay} seconds" + f"Scheduling next {prompt_config.label} frequency cycle for asset {asset} in {delay} seconds" + ) + + method = ( + self.cycle_low_frequency + if prompt_config.label == "low" + else self.cycle_high_frequency ) self.scheduler.enter( delay=delay, priority=1, - action=self.cycle_flow_frequency, + action=method, argument=(asset,), ) - def cycle_flow_frequency(self, asset: str): + def cycle_low_frequency(self, asset: str): cycle_start_time = get_current_time() - self.forward_prompt_low_frequency(asset) + self.sync() + self.miner_uids = get_available_miners_and_update_metagraph_history( + base_neuron=self, + miner_data_handler=self.miner_data_handler, + ) + self.forward_prompt(asset, LOW_FREQUENCY) self.forward_score_low_frequency() # 
self.cleanup_history() - self.schedule_low_frequency(cycle_start_time) + self.schedule_cycle(cycle_start_time, LOW_FREQUENCY) - def forward_prompt_low_frequency(self, asset: str): - self.sync() - miner_uids = get_available_miners_and_update_metagraph_history( - base_neuron=self, + def cycle_high_frequency(self, asset: str): + cycle_start_time = get_current_time() + self.forward_prompt(asset, HIGH_FREQUENCY) + + current_time = get_current_time() + scored_time: datetime = round_time_to_minutes(current_time) + calculate_rewards_and_update_scores( miner_data_handler=self.miner_data_handler, + price_data_provider=self.price_data_provider, + scored_time=scored_time, + cutoff_days=self.config.ewma.cutoff_days, ) + self.schedule_cycle(cycle_start_time, HIGH_FREQUENCY) - if len(miner_uids) == 0: + def forward_prompt(self, asset: str, prompt_config: PromptConfig): + if len(self.miner_uids) == 0: bt.logging.error( "No miners available", "forward_prompt", @@ -148,7 +201,7 @@ def forward_prompt_low_frequency(self, asset: str): request_time = get_current_time() start_time: datetime = round_time_to_minutes( - request_time, self.timeout_extra_seconds + request_time, prompt_config.timeout_extra_seconds ) if should_skip_xau(start_time) and asset == "XAU": @@ -161,27 +214,22 @@ def forward_prompt_low_frequency(self, asset: str): simulation_input = SimulationInput( asset=asset, start_time=start_time.isoformat(), - time_increment=300, - time_length=LOW_FREQUENCY_TIME_LENGTH, + time_increment=prompt_config.time_increment, + time_length=prompt_config.time_length, num_simulations=1000, ) query_available_miners_and_save_responses( base_neuron=self, miner_data_handler=self.miner_data_handler, - miner_uids=miner_uids, + miner_uids=self.miner_uids, simulation_input=simulation_input, request_time=request_time, ) def forward_score_low_frequency(self): current_time = get_current_time() - - # round current time to the closest minute and add extra minutes - # to be sure we are after the start time 
of the prompt - scored_time: datetime = round_time_to_minutes( - current_time, self.timeout_extra_seconds * 2 - ) + scored_time: datetime = round_time_to_minutes(current_time) # ================= Step 3 ================= # # Calculate rewards based on historical predictions data diff --git a/synth/validator/forward.py b/synth/validator/forward.py index a48ef0ef..9168dacf 100644 --- a/synth/validator/forward.py +++ b/synth/validator/forward.py @@ -87,7 +87,7 @@ def calculate_moving_average_and_update_rewards( miner_data_handler: MinerDataHandler, scored_time: datetime, cutoff_days: int, - window_days: float, + window_days: int, softmax_beta: float, ) -> list[dict]: # apply custom moving average rewards @@ -123,7 +123,7 @@ def calculate_rewards_and_update_scores( cutoff_days: int, ) -> bool: # get latest prediction request from validator - validator_requests = miner_data_handler.get_latest_prediction_requests( + validator_requests = miner_data_handler.get_validator_requests_to_score( scored_time, cutoff_days ) diff --git a/synth/validator/miner_data_handler.py b/synth/validator/miner_data_handler.py index 50e2244a..08ab3d88 100644 --- a/synth/validator/miner_data_handler.py +++ b/synth/validator/miner_data_handler.py @@ -340,7 +340,7 @@ def get_miner_prediction( traceback.print_exc(file=sys.stderr) return None - def get_latest_prediction_requests( + def get_validator_requests_to_score( self, scored_time: datetime, cutoff_days: int, diff --git a/tests/test_miner_data_handler.py b/tests/test_miner_data_handler.py index 4f33a6a2..18e5f9b1 100644 --- a/tests/test_miner_data_handler.py +++ b/tests/test_miner_data_handler.py @@ -56,7 +56,9 @@ def test_get_values_within_range(db_engine: Engine): handler = MinerDataHandler(db_engine) handler.save_responses(simulation_data, simulation_input, datetime.now()) - validator_requests = handler.get_latest_prediction_requests(scored_time, 7) + validator_requests = handler.get_validator_requests_to_score( + scored_time, 7 + ) assert 
len(validator_requests) == 1 result = handler.get_miner_prediction(miner_uid, validator_requests[0].id) @@ -110,7 +112,9 @@ def test_get_values_ongoing_range(db_engine: Engine): handler = MinerDataHandler(db_engine) handler.save_responses(simulation_data, simulation_input, datetime.now()) - validator_requests = handler.get_latest_prediction_requests(scored_time, 7) + validator_requests = handler.get_validator_requests_to_score( + scored_time, 7 + ) assert len(validator_requests) == 0 @@ -176,7 +180,9 @@ def test_multiple_records_for_same_miner(db_engine: Engine): simulation_data_2, simulation_input_2, datetime.now() ) - validator_requests = handler.get_latest_prediction_requests(scored_time, 7) + validator_requests = handler.get_validator_requests_to_score( + scored_time, 7 + ) assert len(validator_requests) == 2 result = handler.get_miner_prediction(miner_uid, validator_requests[1].id) @@ -258,7 +264,9 @@ def test_multiple_records_for_same_miner_with_overlapping(db_engine: Engine): simulation_data_2, simulation_input_2, datetime.now() ) - validator_requests = handler.get_latest_prediction_requests(scored_time, 7) + validator_requests = handler.get_validator_requests_to_score( + scored_time, 7 + ) assert len(validator_requests) == 1 result = handler.get_miner_prediction(miner_uid, validator_requests[0].id) @@ -285,7 +293,9 @@ def test_no_data_for_miner(db_engine: Engine): handler = MinerDataHandler(db_engine) - validator_requests = handler.get_latest_prediction_requests(scored_time, 7) + validator_requests = handler.get_validator_requests_to_score( + scored_time, 7 + ) assert len(validator_requests) == 0 @@ -322,7 +332,9 @@ def test_get_values_incorrect_format(db_engine: Engine): handler = MinerDataHandler(db_engine) handler.save_responses(simulation_data, simulation_input, datetime.now()) - validator_requests = handler.get_latest_prediction_requests(scored_time, 7) + validator_requests = handler.get_validator_requests_to_score( + scored_time, 7 + ) assert 
len(validator_requests) == 1 result = handler.get_miner_prediction(miner_uid, validator_requests[0].id) @@ -340,7 +352,9 @@ def test_set_get_scores(db_engine: Engine): scored_time = datetime.fromisoformat("2024-11-27T00:00:00+00:00") handler, _, _ = prepare_random_predictions(db_engine, start_time) - validator_requests = handler.get_latest_prediction_requests(scored_time, 7) + validator_requests = handler.get_validator_requests_to_score( + scored_time, 7 + ) assert len(validator_requests) == 1 diff --git a/tests/test_rewards.py b/tests/test_rewards.py index 3e06f888..c33d10cb 100644 --- a/tests/test_rewards.py +++ b/tests/test_rewards.py @@ -78,7 +78,9 @@ def test_get_rewards(db_engine): price_data_provider = PriceDataProvider() - validator_requests = handler.get_latest_prediction_requests(scored_time, 7) + validator_requests = handler.get_validator_requests_to_score( + scored_time, 7 + ) prompt_scores, detailed_info, real_prices = get_rewards( handler, From 6cac5f66039ee91bf8fd4b6c320eb0031386ff25 Mon Sep 17 00:00:00 2001 From: Thykof Date: Thu, 27 Nov 2025 17:37:36 +0100 Subject: [PATCH 14/24] crps for HTF --- neurons/validator.py | 35 +++--------------- synth/validator/crps_calculation.py | 21 ++++++----- synth/validator/prompt_config.py | 57 +++++++++++++++++++++++++++++ synth/validator/reward.py | 17 +++++---- tests/test_calculate_crps.py | 41 +++++++++++++++++++++ 5 files changed, 125 insertions(+), 46 deletions(-) create mode 100644 synth/validator/prompt_config.py diff --git a/neurons/validator.py b/neurons/validator.py index c91ba7ed..5ddd9de9 100644 --- a/neurons/validator.py +++ b/neurons/validator.py @@ -5,7 +5,6 @@ import multiprocessing as mp import sched import time -from dataclasses import dataclass # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the “Software”), to deal in the Software without restriction, including without limitation @@ -43,40 +42,16 @@ ) from 
synth.validator.miner_data_handler import MinerDataHandler from synth.validator.price_data_provider import PriceDataProvider +from synth.validator.prompt_config import ( + PromptConfig, + LOW_FREQUENCY, + HIGH_FREQUENCY, +) load_dotenv() -@dataclass -class PromptConfig: - label: str - time_length: int - time_increment: int - initial_delay: int - total_cycle_minutes: int - timeout_extra_seconds: int - - -LOW_FREQUENCY = PromptConfig( - label="low", - time_length=86400, - time_increment=300, - initial_delay=60, # avoid 2 prompts to start simultaneously - total_cycle_minutes=60, - timeout_extra_seconds=60, -) - -HIGH_FREQUENCY = PromptConfig( - label="high", - time_length=3600, - time_increment=60, - initial_delay=0, - total_cycle_minutes=12, - timeout_extra_seconds=60, -) - - class Validator(BaseValidatorNeuron): """ Your validator neuron class. You should use this class to define your validator's behavior. In particular, you should replace the forward function with your own logic. diff --git a/synth/validator/crps_calculation.py b/synth/validator/crps_calculation.py index ce77dd3f..8cf031a4 100644 --- a/synth/validator/crps_calculation.py +++ b/synth/validator/crps_calculation.py @@ -1,14 +1,6 @@ import numpy as np from properscoring import crps_ensemble -# Define scoring intervals in seconds -scoring_intervals = { - "5min": 300, # 5 minutes - "30min": 1800, # 30 minutes - "3hour": 10800, # 3 hours - "24hour_abs": 86400, # 24 hours -} - def get_interval_steps(scoring_interval: int, time_increment: int) -> int: """ @@ -21,6 +13,7 @@ def calculate_crps_for_miner( simulation_runs: np.ndarray, real_price_path: np.ndarray, time_increment: int, + scoring_intervals: dict[str, int], ) -> tuple[float, list[dict]]: """ Calculate the total CRPS score for a miner's simulations over specified intervals, @@ -43,6 +36,7 @@ def calculate_crps_for_miner( for interval_name, interval_seconds in scoring_intervals.items(): interval_steps = get_interval_steps(interval_seconds, 
time_increment) absolute_price = interval_name.endswith("_abs") + is_gap = interval_name.endswith("_gaps") # If we are considering absolute prices, adjust the interval steps for potential gaps: # if only the initial price is present, then decrease the interval step @@ -64,11 +58,13 @@ simulation_runs, interval_steps, absolute_price, + is_gap, ) real_changes = calculate_price_changes_over_intervals( real_price_path.reshape(1, -1), interval_steps, absolute_price, + is_gap, ) data_blocks = label_observed_blocks(real_changes[0]) @@ -146,7 +142,10 @@ def label_observed_blocks(arr: np.ndarray) -> np.ndarray: def calculate_price_changes_over_intervals( - price_paths: np.ndarray, interval_steps: int, absolute_price=False + price_paths: np.ndarray, + interval_steps: int, + absolute_price=False, + is_gap=False, ) -> np.ndarray: """ Calculate price changes over specified intervals. @@ -160,7 +159,11 @@ numpy.ndarray: Array of price changes over intervals. """ # Get the prices at the interval points + # [1, 2, 3, 4, 5, 6, 7] -> [1, 3, 5, 7] if interval_steps is 2 interval_prices = price_paths[:, ::interval_steps] + if is_gap: + # [1, 2, 3, 4, 5, 6, 7] -> [1, 3] if interval_steps is 2 + interval_prices = interval_prices[:, :2] # Calculate price changes over intervals if absolute_price: diff --git a/synth/validator/prompt_config.py b/synth/validator/prompt_config.py new file mode 100644 index 00000000..45477497 --- /dev/null +++ b/synth/validator/prompt_config.py @@ -0,0 +1,57 @@ +from dataclasses import dataclass + + +@dataclass +class PromptConfig: + label: str + time_length: int + time_increment: int + initial_delay: int + total_cycle_minutes: int + timeout_extra_seconds: int + scoring_intervals: dict[str, int] # Define scoring intervals in seconds. 
+ + +LOW_FREQUENCY = PromptConfig( + label="low", + time_length=86400, + time_increment=300, + initial_delay=60, # avoid 2 prompts to start simultaneously + total_cycle_minutes=60, + timeout_extra_seconds=60, + scoring_intervals={ + "5min": 300, # 5 minutes + "30min": 1800, # 30 minutes + "3hour": 10800, # 3 hours + "24hour_abs": 86400, # 24 hours + }, +) + +HIGH_FREQUENCY = PromptConfig( + label="high", + time_length=3600, + time_increment=60, + initial_delay=0, + total_cycle_minutes=12, + timeout_extra_seconds=60, + scoring_intervals={ + "1min": 60, + "2min": 120, + "5min": 300, + "15min": 900, + "30min": 1800, + "60min_abs": 3600, + "0_5min_gaps": 300, + "0_10min_gaps": 600, + "0_15min_gaps": 900, + "0_20min_gaps": 1200, + "0_25min_gaps": 1500, + "0_30min_gaps": 1800, + "0_35min_gaps": 2100, + "0_40min_gaps": 2400, + "0_45min_gaps": 2700, + "0_50min_gaps": 3000, + "0_55min_gaps": 3300, + "0_60min_gaps": 3600, + }, +) diff --git a/synth/validator/reward.py b/synth/validator/reward.py index a5b2fee5..0bb35e15 100644 --- a/synth/validator/reward.py +++ b/synth/validator/reward.py @@ -31,13 +31,13 @@ from synth.validator.miner_data_handler import MinerDataHandler from synth.validator.price_data_provider import PriceDataProvider from synth.validator import response_validation_v2 +from synth.validator import prompt_config def reward( miner_data_handler: MinerDataHandler, miner_uid: int, - time_increment: int, - validator_request_id: int, + validator_request: ValidatorRequest, real_prices: list[float], ): """ @@ -47,9 +47,8 @@ def reward( Returns: - float: The reward value for the miner. 
""" - miner_prediction = miner_data_handler.get_miner_prediction( - miner_uid, validator_request_id + miner_uid, validator_request.id ) if miner_prediction is None: @@ -65,11 +64,16 @@ def reward( predictions_path = adjust_predictions(miner_prediction.prediction) simulation_runs = np.array(predictions_path).astype(float) + scoring_intervals = prompt_config.LOW_FREQUENCY.scoring_intervals + if validator_request.time_length == prompt_config.HIGH_FREQUENCY.time_length: + scoring_intervals = prompt_config.HIGH_FREQUENCY.scoring_intervals + try: score, detailed_crps_data = calculate_crps_for_miner( simulation_runs, np.array(real_prices), - time_increment, + validator_request.time_increment, + scoring_intervals, ) except Exception as e: bt.logging.error( @@ -127,8 +131,7 @@ def get_rewards( score, detailed_crps_data, miner_prediction = reward( miner_data_handler, miner_uid, - validator_request.time_increment, - validator_request.id, + validator_request, real_prices, ) scores.append(score) diff --git a/tests/test_calculate_crps.py b/tests/test_calculate_crps.py index d05fb065..de6f8193 100644 --- a/tests/test_calculate_crps.py +++ b/tests/test_calculate_crps.py @@ -2,6 +2,7 @@ import numpy as np +from synth.validator import prompt_config from synth.validator.crps_calculation import ( calculate_crps_for_miner, label_observed_blocks, @@ -19,6 +20,7 @@ def test_calculate_crps_for_miner_1(self): np.array(predictions_path), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) self.assertEqual(sum_all_scores, 284.1200564488584) @@ -31,6 +33,7 @@ def test_calculate_crps_for_miner_1_b(self): np.array(predictions_path), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) self.assertEqual(sum_all_scores, 284.1200564488584) @@ -43,6 +46,7 @@ def test_calculate_crps_for_miner_zero(self): np.array(predictions_path), np.array(real_price_path), time_increment, + 
prompt_config.LOW_FREQUENCY.scoring_intervals, ) self.assertEqual(sum_all_scores, 0) @@ -55,6 +59,7 @@ def test_calculate_crps_for_miner_2(self): np.array([predictions_path]), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) self.assertEqual(sum_all_scores, 479.6904902048716) @@ -68,6 +73,7 @@ def test_calculate_crps_for_miner_3(self): np.array([predictions_path]), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) self.assertEqual(sum_all_scores, 4737.272133130346) @@ -119,6 +125,7 @@ def test_calculate_crps_for_miner_4(self): np.array(predictions_path), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) self.assertEqual(sum_all_scores, 13413.599141058676) @@ -135,6 +142,7 @@ def test_calculate_crps_for_miner_5(self): np.array([predictions_path]), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) self.assertEqual(sum_all_scores, 0.0) @@ -151,6 +159,7 @@ def test_calculate_crps_for_miner_6(self): np.array([predictions_path]), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) self.assertEqual(sum_all_scores, 0.0) @@ -167,6 +176,7 @@ def test_calculate_crps_for_miner_7(self): np.array([predictions_path]), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) self.assertEqual(sum_all_scores, 0.0) @@ -193,6 +203,7 @@ def test_calculate_crps_for_miner_8(self): np.array([predictions_path]), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) self.assertEqual(sum_all_scores, 0.0) @@ -211,12 +222,14 @@ def test_calculate_crps_for_miner_9(self): np.array([predictions_path]), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) sum_all_scores_2, _ = calculate_crps_for_miner( np.array([predictions_path]), np.array(real_price_path_full), 
time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) with self.subTest("Check sum_all_scores equals expected"): @@ -236,6 +249,7 @@ def test_calculate_crps_for_miner_10(self): np.array([predictions_path]), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) self.assertEqual(np.isnan(sum_all_scores), True) @@ -249,6 +263,7 @@ def test_calculate_crps_for_miner_11(self): np.array([predictions_path]), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) self.assertEqual(sum_all_scores, 1061.3650577065207) @@ -272,6 +287,7 @@ def test_calculate_crps_for_miner_12(self): np.array([predictions_path]).astype(float), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, ) self.assertEqual(sum_all_scores, 12697.728694070156) @@ -295,6 +311,31 @@ def test_calculate_crps_for_miner_13(self): np.array([predictions_path]).astype(float), np.array(real_price_path), time_increment, + prompt_config.LOW_FREQUENCY.scoring_intervals, + ) + + self.assertEqual(sum_all_scores, -1) + + def test_calculate_crps_for_miner_gap_1(self): + time_increment = 300 # 300 seconds = 5 minutes + real_price_path = [50, 60, 65, 80, 90, 94, 101, 120, 130] + predictions_path = [ + 0.00011997788254371478, + 0, + 70, + 82.5, + 89.2, + 100, + 110, + 123, + 131, + ] + + sum_all_scores, _ = calculate_crps_for_miner( + np.array([predictions_path]).astype(float), + np.array(real_price_path), + time_increment, + prompt_config.HIGH_FREQUENCY.scoring_intervals, ) self.assertEqual(sum_all_scores, -1) From d334177a38db19ab7c26ffe69de602bfaa7ce7f5 Mon Sep 17 00:00:00 2001 From: Thykof Date: Thu, 27 Nov 2025 19:02:24 +0100 Subject: [PATCH 15/24] split smoothed scores --- Makefile | 1 - ...a1b95303_add_column_rewards_prompt_name.py | 30 +++++++++++ docs/validator_guide.md | 40 -------------- entrypoint-validator.sh | 1 - neurons/validator.py | 26 +++++---- synth/db/models.py | 2 + 
synth/utils/config.py | 7 --- synth/validator/forward.py | 53 ++++++++++--------- synth/validator/miner_data_handler.py | 39 ++++++++++---- synth/validator/moving_average.py | 18 +++---- synth/validator/prompt_config.py | 20 ++++--- synth/validator/reward.py | 9 ++-- tests/test_forward.py | 42 ++++++--------- tests/test_miner_data_handler.py | 2 +- tests/test_moving_average.py | 9 ++-- validator.config.js | 2 +- validator.test.config.js | 2 +- 17 files changed, 148 insertions(+), 155 deletions(-) create mode 100644 alembic/versions/2b28a1b95303_add_column_rewards_prompt_name.py diff --git a/Makefile b/Makefile index c3877cc9..ab47e899 100644 --- a/Makefile +++ b/Makefile @@ -34,4 +34,3 @@ validator: --logging.$(logging_level) \ --neuron.axon_off true \ --ewma.window_days $(ewma_window_days) \ - --ewma.cutoff_days $(ewma_cutoff_days) diff --git a/alembic/versions/2b28a1b95303_add_column_rewards_prompt_name.py b/alembic/versions/2b28a1b95303_add_column_rewards_prompt_name.py new file mode 100644 index 00000000..fb5220de --- /dev/null +++ b/alembic/versions/2b28a1b95303_add_column_rewards_prompt_name.py @@ -0,0 +1,30 @@ +"""add column rewards prompt_name + +Revision ID: 2b28a1b95303 +Revises: a9227b0cb10b +Create Date: 2025-11-27 17:57:01.394792 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = "2b28a1b95303" +down_revision: Union[str, None] = "a9227b0cb10b" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.add_column( + "miner_rewards", sa.Column("prompt_name", sa.Text, nullable=True) + ) + op.execute("UPDATE miner_rewards SET prompt_name='LOW_FREQUENCY'") + + +def downgrade() -> None: + op.drop_column("miner_rewards", "prompt_name") diff --git a/docs/validator_guide.md b/docs/validator_guide.md index 593c82fe..ca096808 100644 --- a/docs/validator_guide.md +++ b/docs/validator_guide.md @@ -9,7 +9,6 @@ - [5. Options](#5-options) - [5.1. Common Options](#51-common-options) - [`--axon.port INTEGER`](#--axonport-integer) - - [`--ewma.cutoff_days INTEGER`](#--ewmacutoff_days-integer) - [`--ewma.window_days INTEGER`](#--ewmawindow_days-float) - [`--logging.debug`](#--loggingdebug) - [`--logging.info`](#--logginginfo) @@ -283,45 +282,6 @@ pm2 start validator.test.config.js -- --axon.port 8091 [Back to top ^][table-of-contents] -#### `--ewma.cutoff_days INTEGER` - -The number of days against which to run the moving average, (e.g. 1). - -Default: `2` - -Example: - -```js -// validator.config.js -module.exports = { - apps: [ - { - name: "validator", - interpreter: "python3", - script: "./neurons/validator.py", - args: "--ewma.cutoff_days 10", - env: { - PYTHONPATH: ".", - }, - }, - ], -}; -``` - -Alternatively, you can add the args directly to the command: - -```shell -pm2 start validator.config.js -- --ewma.cutoff_days 10 -``` - -for testnet it's: - -```shell -pm2 start validator.test.config.js -- --ewma.cutoff_days 10 -``` - -[Back to top ^][table-of-contents] - #### `--ewma.window_days INTEGER` The window in days for the rolling average, (e.g. 10). 
diff --git a/entrypoint-validator.sh b/entrypoint-validator.sh index 3b392006..da3bee25 100644 --- a/entrypoint-validator.sh +++ b/entrypoint-validator.sh @@ -23,7 +23,6 @@ python3.10 ./neurons/validator.py \ --logging.debug \ --neuron.axon_off true \ --ewma.window_days $ewma_window_days \ - --ewma.cutoff_days $ewma_cutoff_days \ --softmax.beta $softmax_beta \ --neuron.vpermit_tao_limit $vpermit_tao_limit \ --gcp.log_id_prefix $log_id_prefix \ diff --git a/neurons/validator.py b/neurons/validator.py index 5ddd9de9..2937ad2d 100644 --- a/neurons/validator.py +++ b/neurons/validator.py @@ -35,7 +35,7 @@ from synth.utils.opening_hours import should_skip_xau from synth.validator.forward import ( calculate_moving_average_and_update_rewards, - calculate_rewards_and_update_scores, + calculate_scores, get_available_miners_and_update_metagraph_history, query_available_miners_and_save_responses, send_weights_to_bittensor_and_update_weights_history, @@ -143,6 +143,7 @@ def cycle_low_frequency(self, asset: str): cycle_start_time = get_current_time() self.sync() + # update the miners, also for the high frequency prompt that will use the same list self.miner_uids = get_available_miners_and_update_metagraph_history( base_neuron=self, miner_data_handler=self.miner_data_handler, @@ -158,11 +159,11 @@ def cycle_high_frequency(self, asset: str): current_time = get_current_time() scored_time: datetime = round_time_to_minutes(current_time) - calculate_rewards_and_update_scores( - miner_data_handler=self.miner_data_handler, - price_data_provider=self.price_data_provider, - scored_time=scored_time, - cutoff_days=self.config.ewma.cutoff_days, + calculate_scores( + self.miner_data_handler, + self.price_data_provider, + scored_time, + HIGH_FREQUENCY, ) self.schedule_cycle(cycle_start_time, HIGH_FREQUENCY) @@ -215,11 +216,11 @@ def forward_score_low_frequency(self): # we store the rewards in the miner_scores table # ========================================== # - success = 
calculate_rewards_and_update_scores( - miner_data_handler=self.miner_data_handler, - price_data_provider=self.price_data_provider, - scored_time=scored_time, - cutoff_days=self.config.ewma.cutoff_days, + success = calculate_scores( + self.miner_data_handler, + self.price_data_provider, + scored_time, + LOW_FREQUENCY, ) if not success: @@ -234,9 +235,6 @@ def forward_score_low_frequency(self): moving_averages_data = calculate_moving_average_and_update_rewards( miner_data_handler=self.miner_data_handler, scored_time=scored_time, - cutoff_days=self.config.ewma.cutoff_days, - window_days=self.config.ewma.window_days, - softmax_beta=self.config.softmax.beta, ) if len(moving_averages_data) == 0: diff --git a/synth/db/models.py b/synth/db/models.py index 3bed9ff3..5c1faace 100644 --- a/synth/db/models.py +++ b/synth/db/models.py @@ -13,6 +13,7 @@ String, JSON, ForeignKey, + Text, ) from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import DeclarativeBase, relationship, Session @@ -131,6 +132,7 @@ class MinerReward(Base): ) smoothed_score = Column(Float, nullable=False) reward_weight = Column(Float, nullable=False) + prompt_name = Column(Text, nullable=True) updated_at = Column(DateTime(timezone=True), nullable=False) miner = relationship("Miner", back_populates="rewards") diff --git a/synth/utils/config.py b/synth/utils/config.py index 3a2b42f6..5e5e4904 100644 --- a/synth/utils/config.py +++ b/synth/utils/config.py @@ -238,13 +238,6 @@ def add_validator_args(_, parser: argparse.ArgumentParser): default=10, ) - parser.add_argument( - "--ewma.cutoff_days", - type=int, - help="The number of days against which to run the moving average", - default=10, - ) - parser.add_argument( "--softmax.beta", type=float, diff --git a/synth/validator/forward.py b/synth/validator/forward.py index 9168dacf..0283cfbc 100644 --- a/synth/validator/forward.py +++ b/synth/validator/forward.py @@ -36,6 +36,7 @@ convert_list_elements_to_str, ) from synth.utils.uids import 
check_uid_availability +from synth.validator import prompt_config from synth.validator.miner_data_handler import MinerDataHandler from synth.validator.moving_average import ( compute_smoothed_score, @@ -86,45 +87,48 @@ def send_weights_to_bittensor_and_update_weights_history( def calculate_moving_average_and_update_rewards( miner_data_handler: MinerDataHandler, scored_time: datetime, - cutoff_days: int, - window_days: int, - softmax_beta: float, ) -> list[dict]: - # apply custom moving average rewards - miner_scores_df = miner_data_handler.get_miner_scores( - scored_time=scored_time, - cutoff_days=cutoff_days, - ) + prompts = [prompt_config.LOW_FREQUENCY, prompt_config.HIGH_FREQUENCY] + + moving_averages_data: dict[str, list[dict]] = {} + for prompt in prompts: + miner_scores_df = miner_data_handler.get_miner_scores( + scored_time, + prompt.window_days, + prompt.time_length, + ) - df = prepare_df_for_moving_average(miner_scores_df) + df = prepare_df_for_moving_average(miner_scores_df) - moving_averages_data = compute_smoothed_score( - miner_data_handler=miner_data_handler, - input_df=df, - window_days=window_days, - scored_time=scored_time, - softmax_beta=softmax_beta, - ) + moving_averages = compute_smoothed_score( + miner_data_handler, + df, + scored_time, + prompt, + ) + + if moving_averages is None: + continue - if moving_averages_data is None: - return [] + print_rewards_df(moving_averages, prompt.label) - print_rewards_df(moving_averages_data) + miner_data_handler.update_miner_rewards(moving_averages) + moving_averages_data[prompt.label] = moving_averages - miner_data_handler.update_miner_rewards(moving_averages_data) + # TODO: combine the 2 smoothed scores - return moving_averages_data + return moving_averages -def calculate_rewards_and_update_scores( +def calculate_scores( miner_data_handler: MinerDataHandler, price_data_provider: PriceDataProvider, scored_time: datetime, - cutoff_days: int, + prompt: prompt_config.PromptConfig, ) -> bool: # get latest 
prediction request from validator validator_requests = miner_data_handler.get_validator_requests_to_score( - scored_time, cutoff_days + scored_time, prompt.window_days, prompt.time_length ) if validator_requests is None or len(validator_requests) == 0: @@ -135,7 +139,6 @@ def calculate_rewards_and_update_scores( fail_count = 0 for validator_request in validator_requests: - bt.logging.debug(f"validator_request_id: {validator_request.id}") prompt_scores, detailed_info, real_prices = get_rewards( diff --git a/synth/validator/miner_data_handler.py b/synth/validator/miner_data_handler.py index 08ab3d88..ec8a497a 100644 --- a/synth/validator/miner_data_handler.py +++ b/synth/validator/miner_data_handler.py @@ -41,7 +41,7 @@ WeightsUpdateHistory, ) from synth.simulation_input import SimulationInput -from synth.validator import response_validation_v2 +from synth.validator import prompt_config, response_validation_v2 class MinerDataHandler: @@ -343,14 +343,18 @@ def get_miner_prediction( def get_validator_requests_to_score( self, scored_time: datetime, - cutoff_days: int, + window_days: int, + time_length: int | None = None, ) -> typing.Optional[list[ValidatorRequest]]: """ Retrieve the list of IDs of the latest validator requests that (start_time + time_length) < scored_time - and (start_time + time_length) >= scored_time - cutoff_days. - This is to ensure that we only get requests that are within the cutoff_days. + and (start_time + time_length) >= scored_time - window_days. + This is to ensure that we only get requests that are within the window_days. and exclude records that are already scored """ + if time_length is None: + time_length = prompt_config.LOW_FREQUENCY.time_length + try: with self.engine.connect() as connection: subq = ( @@ -390,13 +394,14 @@ def get_validator_requests_to_score( and_( # Compare start_time plus an interval (in seconds) to the scored_time. window_start < scored_time, - # Compare start_time plus an interval (in seconds) to the cutoff_days. 
- # This is to ensure that we only get requests that are within the cutoff_days. - # Because we want to include in the moving average only the requests that are within the cutoff_days. + # Compare start_time plus an interval (in seconds) to the window_days. + # This is to ensure that we only get requests that are within the window_days. + # Because we want to include in the moving average only the requests that are within the window_days. window_start - >= scored_time - timedelta(days=cutoff_days), + >= scored_time - timedelta(days=window_days), # Exclude records that have a matching miner_prediction via the NOT EXISTS clause. not_(exists(subq)), + ValidatorRequest.time_length == time_length, ) ) .order_by(ValidatorRequest.start_time.asc()) @@ -469,8 +474,15 @@ def update_metagraph_history(self, metagraph_info: list): ) traceback.print_exc(file=sys.stderr) - def get_miner_scores(self, scored_time: datetime, cutoff_days: int): - min_scored_time = scored_time - timedelta(days=cutoff_days) + def get_miner_scores( + self, + scored_time: datetime, + window_days: int, + time_length: int | None = None, + ): + min_scored_time = scored_time - timedelta(days=window_days) + if time_length is None: + time_length = prompt_config.LOW_FREQUENCY.time_length try: with self.engine.connect() as connection: @@ -492,7 +504,12 @@ def get_miner_scores(self, scored_time: datetime, cutoff_days: int): ValidatorRequest.id == MinerPrediction.validator_requests_id, ) - .where(MinerScore.scored_time > min_scored_time) + .where( + and_( + MinerScore.scored_time > min_scored_time, + ValidatorRequest.time_length == time_length, + ) + ) ) result = connection.execute(query) diff --git a/synth/validator/moving_average.py b/synth/validator/moving_average.py index dc965e21..ba33de35 100644 --- a/synth/validator/moving_average.py +++ b/synth/validator/moving_average.py @@ -9,6 +9,7 @@ from synth.validator.miner_data_handler import MinerDataHandler +from synth.validator.prompt_config import PromptConfig 
from synth.validator.reward import compute_softmax @@ -52,6 +53,7 @@ def prepare_df_for_moving_average(df): ) # 4) left‐merge the real data onto that grid + full["scored_time"] = pd.to_datetime(full["scored_time"]) full = full.merge(df, on=["miner_id", "scored_time"], how="left").merge( miner_first, on="miner_id", how="left" ) @@ -122,9 +124,8 @@ def apply_per_asset_coefficients( def compute_smoothed_score( miner_data_handler: MinerDataHandler, input_df: DataFrame, - window_days: int, scored_time: datetime, - softmax_beta: float, + prompt_config: PromptConfig, ) -> typing.Optional[list[dict]]: if input_df.empty: return None @@ -140,11 +141,7 @@ def compute_smoothed_score( group_df["scored_time"] = pd.to_datetime(group_df["scored_time"]) group_df = group_df.sort_values("scored_time") - # Only consider rows within the last 10 days from scored_time - min_time = scored_time - pd.Timedelta(days=window_days) - mask = (group_df["scored_time"] > min_time) & ( - group_df["scored_time"] <= scored_time - ) + mask = group_df["scored_time"] <= scored_time window_df = group_df.loc[mask] # Drop NaN prompt_score_v3 @@ -181,7 +178,7 @@ def compute_smoothed_score( r["rolling_avg"] for r in filtered_moving_averages_data ] reward_weight_list = compute_softmax( - np.array(rolling_avg_list), softmax_beta + np.array(rolling_avg_list), prompt_config.softmax_beta ) rewards = [] @@ -197,13 +194,14 @@ def compute_smoothed_score( "smoothed_score": item["rolling_avg"], "reward_weight": float(reward_weight), "updated_at": scored_time.isoformat(), + "prompt_name": prompt_config.__name__, } ) return rewards -def print_rewards_df(moving_averages_data): - bt.logging.info("Scored responses moving averages:") +def print_rewards_df(moving_averages_data: list[dict], label: str = ""): + bt.logging.info(f"Scored responses moving averages {label}") df = pd.DataFrame.from_dict(moving_averages_data) bt.logging.info(df.to_string()) diff --git a/synth/validator/prompt_config.py 
b/synth/validator/prompt_config.py index 45477497..0cb39b4e 100644 --- a/synth/validator/prompt_config.py +++ b/synth/validator/prompt_config.py @@ -10,6 +10,8 @@ class PromptConfig: total_cycle_minutes: int timeout_extra_seconds: int scoring_intervals: dict[str, int] # Define scoring intervals in seconds. + window_days: int + softmax_beta: float LOW_FREQUENCY = PromptConfig( @@ -25,6 +27,8 @@ class PromptConfig: "3hour": 10800, # 3 hours "24hour_abs": 86400, # 24 hours }, + window_days=10, + softmax_beta=-0.1, ) HIGH_FREQUENCY = PromptConfig( @@ -46,12 +50,14 @@ class PromptConfig: "0_15min_gaps": 900, "0_20min_gaps": 1200, "0_25min_gaps": 1500, - "0_30min_gaps": 300, - "0_35min_gaps": 300, - "0_40min_gaps": 300, - "0_45min_gaps": 300, - "0_50min_gaps": 300, - "0_55min_gaps": 300, - "0_60min_gaps": 300, + "0_30min_gaps": 1800, + "0_35min_gaps": 2100, + "0_40min_gaps": 2400, + "0_45min_gaps": 2700, + "0_50min_gaps": 3000, + "0_55min_gaps": 3300, + "0_60min_gaps": 3600, }, + window_days=10, + softmax_beta=-0.1, ) diff --git a/synth/validator/reward.py b/synth/validator/reward.py index 0bb35e15..4fcce885 100644 --- a/synth/validator/reward.py +++ b/synth/validator/reward.py @@ -64,9 +64,12 @@ def reward( predictions_path = adjust_predictions(miner_prediction.prediction) simulation_runs = np.array(predictions_path).astype(float) - scoring_intervals = prompt_config.LOW_FREQUENCY.scoring_intervals - if validator_request.time_length == prompt_config.HIGH_FREQUENCY.time_length: - scoring_intervals = prompt_config.HIGH_FREQUENCY.scoring_intervals + scoring_intervals = ( + prompt_config.HIGH_FREQUENCY.scoring_intervals + if validator_request.time_length + == prompt_config.HIGH_FREQUENCY.time_length + else prompt_config.LOW_FREQUENCY.scoring_intervals + ) try: score, detailed_crps_data = calculate_crps_for_miner( diff --git a/tests/test_forward.py b/tests/test_forward.py index babd7118..a40a36de 100644 --- a/tests/test_forward.py +++ b/tests/test_forward.py @@ -12,7 +12,7 @@ 
from synth.validator import response_validation_v2 from synth.validator.forward import ( calculate_moving_average_and_update_rewards, - calculate_rewards_and_update_scores, + calculate_scores, ) from synth.db.models import Miner, MinerReward from synth.validator.miner_data_handler import MinerDataHandler @@ -28,18 +28,18 @@ def test_calculate_rewards_and_update_scores(db_engine: Engine): price_data_provider = PriceDataProvider() - success = calculate_rewards_and_update_scores( + success = calculate_scores( miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - cutoff_days=7, + window_days=7, ) assert success miner_scores_df = handler.get_miner_scores( scored_time=scored_time, - cutoff_days=2, + window_days=2, ) assert len(miner_scores_df) == len(miner_uids) @@ -55,11 +55,11 @@ def test_calculate_moving_average_and_update_rewards(db_engine: Engine): price_data_provider = PriceDataProvider() - success = calculate_rewards_and_update_scores( + success = calculate_scores( miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - cutoff_days=7, + window_days=7, ) assert success @@ -67,9 +67,6 @@ def test_calculate_moving_average_and_update_rewards(db_engine: Engine): moving_averages_data = calculate_moving_average_and_update_rewards( miner_data_handler=handler, scored_time=scored_time, - cutoff_days=4, - window_days=2, - softmax_beta=-0.003, ) print("moving_averages_data", moving_averages_data) @@ -150,16 +147,16 @@ def test_calculate_moving_average_and_update_rewards_new_miner( # scored time is start time + 24 hours and +4 minutes because new prompt every 64 minutes scored_time = start_time + timedelta(days=1, minutes=4) - success = calculate_rewards_and_update_scores( + success = calculate_scores( miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - cutoff_days=7, + window_days=7, ) miner_scores_df = handler.get_miner_scores( 
scored_time=scored_time, - cutoff_days=4, + window_days=4, ) print("miner_scores_df", miner_scores_df) @@ -169,9 +166,6 @@ def test_calculate_moving_average_and_update_rewards_new_miner( moving_averages_data = calculate_moving_average_and_update_rewards( miner_data_handler=handler, scored_time=scored_time, - cutoff_days=4, - window_days=2, - softmax_beta=-0.003, ) print("moving_averages_data", moving_averages_data) @@ -277,16 +271,16 @@ def test_calculate_moving_average_and_update_rewards_new_miner_registration( # scored time is start time + 24 hours and +4 minutes because new prompt every 64 minutes scored_time = start_time + timedelta(days=1, minutes=4) - success = calculate_rewards_and_update_scores( + success = calculate_scores( miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - cutoff_days=7, + window_days=7, ) miner_scores_df = handler.get_miner_scores( scored_time=scored_time, - cutoff_days=4, + window_days=4, ) print("miner_scores_df: ", miner_scores_df) @@ -296,9 +290,6 @@ def test_calculate_moving_average_and_update_rewards_new_miner_registration( moving_averages_data = calculate_moving_average_and_update_rewards( miner_data_handler=handler, scored_time=scored_time, - cutoff_days=4, - window_days=2, - softmax_beta=-0.003, ) print("moving_averages_data", moving_averages_data) @@ -397,16 +388,16 @@ def test_calculate_moving_average_and_update_rewards_only_invalid( # scored time is start time + 24 hours and +4 minutes because new prompt every 64 minutes scored_time = start_time + timedelta(days=1, minutes=4) - success = calculate_rewards_and_update_scores( + success = calculate_scores( miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - cutoff_days=7, + window_days=7, ) miner_scores_df = handler.get_miner_scores( scored_time=scored_time, - cutoff_days=4, + window_days=4, ) print("miner_scores_df", miner_scores_df) @@ -416,9 +407,6 @@ def 
test_calculate_moving_average_and_update_rewards_only_invalid( moving_averages_data = calculate_moving_average_and_update_rewards( miner_data_handler=handler, scored_time=scored_time, - cutoff_days=4, - window_days=2, - softmax_beta=-0.003, ) print("moving_averages_data", moving_averages_data) diff --git a/tests/test_miner_data_handler.py b/tests/test_miner_data_handler.py index 18e5f9b1..4d8fa5d8 100644 --- a/tests/test_miner_data_handler.py +++ b/tests/test_miner_data_handler.py @@ -372,7 +372,7 @@ def test_set_get_scores(db_engine: Engine): miner_scores_df = handler.get_miner_scores( scored_time=scored_time, - cutoff_days=4, + window_days=4, ) print("miner_scores_df", miner_scores_df) diff --git a/tests/test_moving_average.py b/tests/test_moving_average.py index 8ca34f29..6ee1dee4 100644 --- a/tests/test_moving_average.py +++ b/tests/test_moving_average.py @@ -6,6 +6,7 @@ from synth.validator.miner_data_handler import MinerDataHandler from synth.validator.moving_average import compute_smoothed_score +from synth.validator.prompt_config import LOW_FREQUENCY def read_csv(file_name): @@ -18,7 +19,6 @@ def test_moving_average_1(db_engine: Engine): handler = MinerDataHandler(db_engine) scored_time = datetime.fromisoformat("2025-02-21T17:23:00+00:00") - window_days = 2 df = read_csv("cutoff_data_4_days.csv") df["scored_time"] = pd.to_datetime(df["scored_time"]) @@ -26,9 +26,8 @@ def test_moving_average_1(db_engine: Engine): moving_averages_data = compute_smoothed_score( handler, input_df=df, - window_days=window_days, scored_time=scored_time, - softmax_beta=-0.003, + prompt_config=LOW_FREQUENCY, ) # The miner id you want to search for @@ -54,7 +53,6 @@ def test_moving_average_2(db_engine: Engine): handler = MinerDataHandler(db_engine) scored_time = datetime.fromisoformat("2025-02-21T17:23:00+00:00") - window_days = 1 df = read_csv("cutoff_data_2_days.csv") df["scored_time"] = pd.to_datetime(df["scored_time"]) @@ -62,9 +60,8 @@ def test_moving_average_2(db_engine: 
Engine): moving_averages_data = compute_smoothed_score( handler, input_df=df, - window_days=window_days, scored_time=scored_time, - softmax_beta=-0.003, + prompt_config=LOW_FREQUENCY, ) # The miner id you want to search for diff --git a/validator.config.js b/validator.config.js index 50b53848..4aadc193 100644 --- a/validator.config.js +++ b/validator.config.js @@ -4,7 +4,7 @@ module.exports = { name: "validator", interpreter: "python3", script: "./neurons/validator.py", - args: "--netuid 50 --logging.debug --wallet.name validator --wallet.hotkey default --neuron.axon_off true --neuron.vpermit_tao_limit 999999 --ewma.window_days 10 --ewma.cutoff_days 10 --softmax.beta -0.1", + args: "--netuid 50 --logging.debug --wallet.name validator --wallet.hotkey default --neuron.axon_off true --neuron.vpermit_tao_limit 999999 --ewma.window_days 10 --softmax.beta -0.1", env: { PYTHONPATH: ".", }, diff --git a/validator.test.config.js b/validator.test.config.js index 13e01059..9b03deff 100644 --- a/validator.test.config.js +++ b/validator.test.config.js @@ -4,7 +4,7 @@ module.exports = { name: "validator", interpreter: "python3", script: "./neurons/validator.py", - args: "--netuid 247 --logging.debug --logging.trace --subtensor.network test --wallet.name validator --wallet.hotkey default --neuron.axon_off true --ewma.window_days 10 --ewma.cutoff_days 10 --softmax.beta -0.1", + args: "--netuid 247 --logging.debug --logging.trace --subtensor.network test --wallet.name validator --wallet.hotkey default --neuron.axon_off true --ewma.window_days 10 --softmax.beta -0.1", env: { PYTHONPATH: ".", }, From 004df4496dd337e5d21303187a9952e8547f7c12 Mon Sep 17 00:00:00 2001 From: Thykof Date: Fri, 28 Nov 2025 11:59:35 +0100 Subject: [PATCH 16/24] combine smoothed scores --- ...a1b95303_add_column_rewards_prompt_name.py | 2 +- neurons/validator.py | 2 +- synth/validator/forward.py | 5 ++-- synth/validator/moving_average.py | 20 ++++++++++++++- tests/test_forward.py | 25 +++++++++++-------- 5 
files changed, 37 insertions(+), 17 deletions(-) diff --git a/alembic/versions/2b28a1b95303_add_column_rewards_prompt_name.py b/alembic/versions/2b28a1b95303_add_column_rewards_prompt_name.py index fb5220de..0bc5b172 100644 --- a/alembic/versions/2b28a1b95303_add_column_rewards_prompt_name.py +++ b/alembic/versions/2b28a1b95303_add_column_rewards_prompt_name.py @@ -23,7 +23,7 @@ def upgrade() -> None: op.add_column( "miner_rewards", sa.Column("prompt_name", sa.Text, nullable=True) ) - op.execute("UPDATE miner_rewards SET prompt_name='LOW_FREQUENCY'") + op.execute("UPDATE miner_rewards SET prompt_name='low'") def downgrade() -> None: diff --git a/neurons/validator.py b/neurons/validator.py index 2937ad2d..9bc58ef0 100644 --- a/neurons/validator.py +++ b/neurons/validator.py @@ -129,7 +129,7 @@ def schedule_cycle( method = ( self.cycle_low_frequency - if prompt_config.label == "low" + if prompt_config.label == LOW_FREQUENCY.label else self.cycle_high_frequency ) self.scheduler.enter( diff --git a/synth/validator/forward.py b/synth/validator/forward.py index 0283cfbc..fae34e87 100644 --- a/synth/validator/forward.py +++ b/synth/validator/forward.py @@ -39,6 +39,7 @@ from synth.validator import prompt_config from synth.validator.miner_data_handler import MinerDataHandler from synth.validator.moving_average import ( + combine_moving_averages, compute_smoothed_score, prepare_df_for_moving_average, print_rewards_df, @@ -115,9 +116,7 @@ def calculate_moving_average_and_update_rewards( miner_data_handler.update_miner_rewards(moving_averages) moving_averages_data[prompt.label] = moving_averages - # TODO: combine the 2 smoothed scores - - return moving_averages + return combine_moving_averages(moving_averages_data) def calculate_scores( diff --git a/synth/validator/moving_average.py b/synth/validator/moving_average.py index ba33de35..729c6d44 100644 --- a/synth/validator/moving_average.py +++ b/synth/validator/moving_average.py @@ -194,7 +194,7 @@ def compute_smoothed_score( 
"smoothed_score": item["rolling_avg"], "reward_weight": float(reward_weight), "updated_at": scored_time.isoformat(), - "prompt_name": prompt_config.__name__, + "prompt_name": prompt_config.label, } ) @@ -205,3 +205,21 @@ def print_rewards_df(moving_averages_data: list[dict], label: str = ""): bt.logging.info(f"Scored responses moving averages {label}") df = pd.DataFrame.from_dict(moving_averages_data) bt.logging.info(df.to_string()) + + +def combine_moving_averages( + moving_averages_data: dict[str, list[dict]], +) -> list[dict]: + map_miner_reward: dict[int, dict] = {} + + for moving_averages in list(moving_averages_data.values()): + for reward in moving_averages: + miner_id = reward["miner_id"] + if miner_id in map_miner_reward: + map_miner_reward[miner_id]["reward_weight"] += reward[ + "reward_weight" + ] + else: + map_miner_reward[miner_id] = reward + + return list(map_miner_reward.values()) diff --git a/tests/test_forward.py b/tests/test_forward.py index a40a36de..95ae43a4 100644 --- a/tests/test_forward.py +++ b/tests/test_forward.py @@ -17,6 +17,7 @@ from synth.db.models import Miner, MinerReward from synth.validator.miner_data_handler import MinerDataHandler from synth.validator.price_data_provider import PriceDataProvider +from synth.validator import prompt_config from tests.utils import prepare_random_predictions @@ -32,14 +33,13 @@ def test_calculate_rewards_and_update_scores(db_engine: Engine): miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - window_days=7, + prompt=prompt_config.HIGH_FREQUENCY, ) assert success miner_scores_df = handler.get_miner_scores( - scored_time=scored_time, - window_days=2, + scored_time=scored_time, prompt=prompt_config.HIGH_FREQUENCY ) assert len(miner_scores_df) == len(miner_uids) @@ -59,7 +59,7 @@ def test_calculate_moving_average_and_update_rewards(db_engine: Engine): miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - window_days=7, 
+ prompt=prompt_config.HIGH_FREQUENCY, ) assert success @@ -151,12 +151,13 @@ def test_calculate_moving_average_and_update_rewards_new_miner( miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - window_days=7, + prompt=prompt_config.HIGH_FREQUENCY, ) miner_scores_df = handler.get_miner_scores( scored_time=scored_time, - window_days=4, + window_days=prompt_config.HIGH_FREQUENCY.window_days, + time_length=prompt_config.HIGH_FREQUENCY.time_length, ) print("miner_scores_df", miner_scores_df) @@ -275,12 +276,13 @@ def test_calculate_moving_average_and_update_rewards_new_miner_registration( miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - window_days=7, + prompt=prompt_config.HIGH_FREQUENCY, ) miner_scores_df = handler.get_miner_scores( scored_time=scored_time, - window_days=4, + window_days=prompt_config.HIGH_FREQUENCY.window_days, + time_length=prompt_config.HIGH_FREQUENCY.time_length, ) print("miner_scores_df: ", miner_scores_df) @@ -392,12 +394,13 @@ def test_calculate_moving_average_and_update_rewards_only_invalid( miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - window_days=7, + prompt=prompt_config.HIGH_FREQUENCY, ) miner_scores_df = handler.get_miner_scores( - scored_time=scored_time, - window_days=4, + scored_time, + prompt_config.HIGH_FREQUENCY.window_days, + prompt_config.HIGH_FREQUENCY.time_length, ) print("miner_scores_df", miner_scores_df) From 5e85b357af3799f25ac2273244cb1cde765ce033 Mon Sep 17 00:00:00 2001 From: Thykof Date: Fri, 28 Nov 2025 11:59:39 +0100 Subject: [PATCH 17/24] lint --- pyproject.toml | 2 +- synth/validator/forward.py | 7 ++++-- synth/validator/miner_data_handler.py | 2 +- synth/validator/price_data_provider.py | 8 +++---- synth/validator/response_validation_v2.py | 2 ++ synth/validator/reward.py | 14 +++++------ tests/test_miner_data_handler.py | 29 ++++++++++++++++++----- 
tests/test_response_validation.py | 16 ++++++------- 8 files changed, 50 insertions(+), 30 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3a635059..48bded74 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,7 @@ exclude = ''' ''' [tool.mypy] -disable_error_code = ["type-arg"] +disable_error_code = ["type-arg", "import-untyped", "no-untyped-def", "no-untyped-call"] python_version = 3.10 strict = true show_error_codes = true diff --git a/synth/validator/forward.py b/synth/validator/forward.py index fae34e87..45ed7c6a 100644 --- a/synth/validator/forward.py +++ b/synth/validator/forward.py @@ -154,11 +154,14 @@ def calculate_scores( continue miner_score_time = validator_request.start_time + timedelta( - seconds=validator_request.time_length + seconds=int(validator_request.time_length) ) miner_data_handler.set_miner_scores( - real_prices, validator_request.id, detailed_info, miner_score_time + real_prices, + int(validator_request.id), + detailed_info, + miner_score_time, ) # Success if at least one request succeed diff --git a/synth/validator/miner_data_handler.py b/synth/validator/miner_data_handler.py index ec8a497a..84af0f6c 100644 --- a/synth/validator/miner_data_handler.py +++ b/synth/validator/miner_data_handler.py @@ -101,7 +101,7 @@ def get_latest_asset(self, time_length: int) -> str | None: return None # Return the asset with the least count - return result[0].asset + return str(result[0].asset) except Exception as e: bt.logging.error(f"in get_next_asset (got an exception): {e}") traceback.print_exc(file=sys.stderr) diff --git a/synth/validator/price_data_provider.py b/synth/validator/price_data_provider.py index 59a70316..a505f711 100644 --- a/synth/validator/price_data_provider.py +++ b/synth/validator/price_data_provider.py @@ -37,7 +37,7 @@ class PriceDataProvider: reraise=True, before=before_log(bt.logging._logger, logging.DEBUG), ) - def fetch_data(self, validator_request: ValidatorRequest) -> list[dict]: + def 
fetch_data(self, validator_request: ValidatorRequest) -> list: """ Fetch real prices data from an external REST service. Returns an array of time points with prices. @@ -51,7 +51,7 @@ def fetch_data(self, validator_request: ValidatorRequest) -> list[dict]: end_time_int = start_time_int + validator_request.time_length params = { - "symbol": self._get_token_mapping(validator_request.asset), + "symbol": self._get_token_mapping(str(validator_request.asset)), "resolution": 1, "from": start_time_int, "to": end_time_int, @@ -65,8 +65,8 @@ def fetch_data(self, validator_request: ValidatorRequest) -> list[dict]: transformed_data = self._transform_data( data, start_time_int, - validator_request.time_increment, - validator_request.time_length, + int(validator_request.time_increment), + int(validator_request.time_length), ) return transformed_data diff --git a/synth/validator/response_validation_v2.py b/synth/validator/response_validation_v2.py index bce1cf75..07f0c904 100644 --- a/synth/validator/response_validation_v2.py +++ b/synth/validator/response_validation_v2.py @@ -45,6 +45,8 @@ def validate_response_type(response) -> typing.Optional[str]: if not isinstance(response[1], int): return f"Time increment format is incorrect: expected int, got {type(response[1])}" + return None + def validate_responses( response, diff --git a/synth/validator/reward.py b/synth/validator/reward.py index 4fcce885..b69c0318 100644 --- a/synth/validator/reward.py +++ b/synth/validator/reward.py @@ -48,7 +48,7 @@ def reward( - float: The reward value for the miner. 
""" miner_prediction = miner_data_handler.get_miner_prediction( - miner_uid, validator_request.id + miner_uid, int(validator_request.id) ) if miner_prediction is None: @@ -61,7 +61,7 @@ def reward( if len(real_prices) == 0: return -1, [], miner_prediction - predictions_path = adjust_predictions(miner_prediction.prediction) + predictions_path = adjust_predictions(list(miner_prediction.prediction)) simulation_runs = np.array(predictions_path).astype(float) scoring_intervals = ( @@ -75,7 +75,7 @@ def reward( score, detailed_crps_data = calculate_crps_for_miner( simulation_runs, np.array(real_prices), - validator_request.time_increment, + int(validator_request.time_increment), scoring_intervals, ) except Exception as e: @@ -112,7 +112,7 @@ def get_rewards( """ miner_uids = miner_data_handler.get_miner_uid_of_prediction_request( - validator_request.id + int(validator_request.id) ) if miner_uids is None: @@ -198,10 +198,8 @@ def compute_softmax(score_values: np.ndarray, beta: float) -> np.ndarray: bt.logging.info(f"Going to use the following value of beta: {beta}") exp_scores = np.exp(beta * score_values) - softmax_scores_valid = exp_scores / np.sum(exp_scores) - softmax_scores = softmax_scores_valid - - return softmax_scores + softmax_scores_valid: np.ndarray = exp_scores / np.sum(exp_scores) + return softmax_scores_valid def clean_numpy_in_crps_data(crps_data: list) -> list: diff --git a/tests/test_miner_data_handler.py b/tests/test_miner_data_handler.py index 4d8fa5d8..eccf8368 100644 --- a/tests/test_miner_data_handler.py +++ b/tests/test_miner_data_handler.py @@ -59,12 +59,16 @@ def test_get_values_within_range(db_engine: Engine): validator_requests = handler.get_validator_requests_to_score( scored_time, 7 ) + assert validator_requests is not None assert len(validator_requests) == 1 - result = handler.get_miner_prediction(miner_uid, validator_requests[0].id) + result = handler.get_miner_prediction( + miner_uid, int(validator_requests[0].id) + ) # get only second 
element from the result tuple # that corresponds to the prediction result + assert result is not None prediction = result.prediction assert len(prediction) == 1 @@ -116,6 +120,7 @@ def test_get_values_ongoing_range(db_engine: Engine): scored_time, 7 ) + assert validator_requests is not None assert len(validator_requests) == 0 @@ -183,9 +188,12 @@ def test_multiple_records_for_same_miner(db_engine: Engine): validator_requests = handler.get_validator_requests_to_score( scored_time, 7 ) + assert validator_requests is not None assert len(validator_requests) == 2 - result = handler.get_miner_prediction(miner_uid, validator_requests[1].id) + result = handler.get_miner_prediction( + miner_uid, int(validator_requests[1].id) + ) assert result is not None @@ -267,12 +275,16 @@ def test_multiple_records_for_same_miner_with_overlapping(db_engine: Engine): validator_requests = handler.get_validator_requests_to_score( scored_time, 7 ) + assert validator_requests is not None assert len(validator_requests) == 1 - result = handler.get_miner_prediction(miner_uid, validator_requests[0].id) + result = handler.get_miner_prediction( + miner_uid, int(validator_requests[0].id) + ) # get only second element from the result tuple # that corresponds to the prediction result + assert result is not None prediction = result.prediction assert len(prediction) == 1 @@ -296,6 +308,7 @@ def test_no_data_for_miner(db_engine: Engine): validator_requests = handler.get_validator_requests_to_score( scored_time, 7 ) + assert validator_requests is not None assert len(validator_requests) == 0 @@ -335,9 +348,13 @@ def test_get_values_incorrect_format(db_engine: Engine): validator_requests = handler.get_validator_requests_to_score( scored_time, 7 ) + assert validator_requests is not None assert len(validator_requests) == 1 - result = handler.get_miner_prediction(miner_uid, validator_requests[0].id) + result = handler.get_miner_prediction( + miner_uid, int(validator_requests[0].id) + ) + assert result is not 
None prediction = result.prediction format_validation = result.format_validation @@ -355,7 +372,7 @@ def test_set_get_scores(db_engine: Engine): validator_requests = handler.get_validator_requests_to_score( scored_time, 7 ) - + assert validator_requests is not None assert len(validator_requests) == 1 prompt_scores, detailed_info, real_prices = get_rewards( @@ -367,7 +384,7 @@ def test_set_get_scores(db_engine: Engine): assert prompt_scores is not None handler.set_miner_scores( - real_prices, validator_requests[0].id, detailed_info, scored_time + real_prices, int(validator_requests[0].id), detailed_info, scored_time ) miner_scores_df = handler.get_miner_scores( diff --git a/tests/test_response_validation.py b/tests/test_response_validation.py index 2833611d..cd4bc426 100644 --- a/tests/test_response_validation.py +++ b/tests/test_response_validation.py @@ -103,12 +103,12 @@ def test_validate_responses_incorrect_number_of_paths(): ) assert result == "Number of paths is incorrect: expected 2, got 0" - response = (int(start_time.timestamp()), time_increment, [123.45]) + response2 = (int(start_time.timestamp()), time_increment, [123.45]) request_time = start_time process_time_str = "0" result = validate_responses( - response, simulation_input, request_time, process_time_str + response2, simulation_input, request_time, process_time_str ) assert result == "Number of paths is incorrect: expected 2, got 1" @@ -174,19 +174,19 @@ def test_validate_responses_incorrect_start_time(): == "Start time format is incorrect: expected int, got " ) - response = (start_time.timestamp(), time_increment, [123.45] * 11) + response2 = (start_time.timestamp(), time_increment, [123.45] * 11) request_time = start_time process_time_str = "0" result = validate_responses( - response, simulation_input, request_time, process_time_str + response2, simulation_input, request_time, process_time_str ) assert ( result == "Start time format is incorrect: expected int, got " ) - response = ( + response3 = ( 
int(start_time.timestamp()) + 1, time_increment, [123.45] * 11, @@ -195,7 +195,7 @@ def test_validate_responses_incorrect_start_time(): process_time_str = "0" result = validate_responses( - response, simulation_input, request_time, process_time_str + response3, simulation_input, request_time, process_time_str ) assert ( result @@ -222,7 +222,7 @@ def test_validate_responses_incorrect_time_increment(): == "Time increment format is incorrect: expected int, got " ) - response = ( + response2 = ( int(start_time.timestamp()), time_increment + 1, [123.45] * 11, @@ -231,7 +231,7 @@ def test_validate_responses_incorrect_time_increment(): process_time_str = "0" result = validate_responses( - response, simulation_input, request_time, process_time_str + response2, simulation_input, request_time, process_time_str ) assert result == "Time increment is incorrect: expected 1, got 2" From f2e24b4db0a9d60709bbb185242009610b6133fa Mon Sep 17 00:00:00 2001 From: Thykof Date: Fri, 28 Nov 2025 14:51:48 +0100 Subject: [PATCH 18/24] smoothed_score_coefficient --- synth/validator/moving_average.py | 2 +- synth/validator/prompt_config.py | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/synth/validator/moving_average.py b/synth/validator/moving_average.py index 5135c559..eb370ec0 100644 --- a/synth/validator/moving_average.py +++ b/synth/validator/moving_average.py @@ -201,7 +201,7 @@ def compute_smoothed_score( "miner_id": item["miner_id"], "miner_uid": item["miner_uid"], "smoothed_score": item["rolling_avg"], - "reward_weight": float(reward_weight), + "reward_weight": float(reward_weight) * prompt_config.smoothed_score_coefficient, "updated_at": scored_time.isoformat(), "prompt_name": prompt_config.label, } diff --git a/synth/validator/prompt_config.py b/synth/validator/prompt_config.py index 0cb39b4e..2e59bdf4 100644 --- a/synth/validator/prompt_config.py +++ b/synth/validator/prompt_config.py @@ -12,6 +12,7 @@ class PromptConfig: scoring_intervals: dict[str, int] 
# Define scoring intervals in seconds. window_days: int softmax_beta: float + smoothed_score_coefficient: float LOW_FREQUENCY = PromptConfig( @@ -29,6 +30,7 @@ class PromptConfig: }, window_days=10, softmax_beta=-0.1, + smoothed_score_coefficient=0.5, ) HIGH_FREQUENCY = PromptConfig( @@ -58,6 +60,7 @@ class PromptConfig: "0_55min_gaps": 3300, "0_60min_gaps": 3600, }, - window_days=10, + window_days=1, softmax_beta=-0.1, + smoothed_score_coefficient=0.5, ) From 28068a948b8768030ffe0df1b0f92ac15872ac2f Mon Sep 17 00:00:00 2001 From: Thykof Date: Fri, 28 Nov 2025 15:56:49 +0100 Subject: [PATCH 19/24] improvements --- neurons/validator.py | 6 +++++- synth/base/neuron.py | 2 +- synth/validator/moving_average.py | 3 ++- synth/validator/prompt_config.py | 1 + 4 files changed, 9 insertions(+), 3 deletions(-) diff --git a/neurons/validator.py b/neurons/validator.py index 9bc58ef0..7dab5f14 100644 --- a/neurons/validator.py +++ b/neurons/validator.py @@ -140,6 +140,7 @@ def schedule_cycle( ) def cycle_low_frequency(self, asset: str): + bt.logging.info(f"starting the {LOW_FREQUENCY.label} frequency cycle") cycle_start_time = get_current_time() self.sync() @@ -159,6 +160,7 @@ def cycle_high_frequency(self, asset: str): current_time = get_current_time() scored_time: datetime = round_time_to_minutes(current_time) + bt.logging.info(f"forward score {HIGH_FREQUENCY.label} frequency") calculate_scores( self.miner_data_handler, self.price_data_provider, @@ -168,6 +170,7 @@ def cycle_high_frequency(self, asset: str): self.schedule_cycle(cycle_start_time, HIGH_FREQUENCY) def forward_prompt(self, asset: str, prompt_config: PromptConfig): + bt.logging.info(f"forward prompt for {prompt_config.label} frequency") if len(self.miner_uids) == 0: bt.logging.error( "No miners available", @@ -192,7 +195,7 @@ def forward_prompt(self, asset: str, prompt_config: PromptConfig): start_time=start_time.isoformat(), time_increment=prompt_config.time_increment, time_length=prompt_config.time_length, - 
num_simulations=1000, + num_simulations=prompt_config.num_simulations, ) query_available_miners_and_save_responses( @@ -204,6 +207,7 @@ def forward_prompt(self, asset: str, prompt_config: PromptConfig): ) def forward_score_low_frequency(self): + bt.logging.info(f"forward score {LOW_FREQUENCY.label} frequency") current_time = get_current_time() scored_time: datetime = round_time_to_minutes(current_time) diff --git a/synth/base/neuron.py b/synth/base/neuron.py index 57e2c234..8613c650 100644 --- a/synth/base/neuron.py +++ b/synth/base/neuron.py @@ -127,7 +127,7 @@ def sync(self): # in the run() method. This to save to database the result of the set_weights. # Always save state. - self.save_state() + # self.save_state() def check_registered(self): # --- Check for registration. diff --git a/synth/validator/moving_average.py b/synth/validator/moving_average.py index eb370ec0..dd11e322 100644 --- a/synth/validator/moving_average.py +++ b/synth/validator/moving_average.py @@ -201,7 +201,8 @@ def compute_smoothed_score( "miner_id": item["miner_id"], "miner_uid": item["miner_uid"], "smoothed_score": item["rolling_avg"], - "reward_weight": float(reward_weight) * prompt_config.smoothed_score_coefficient, + "reward_weight": float(reward_weight) + * prompt_config.smoothed_score_coefficient, "updated_at": scored_time.isoformat(), "prompt_name": prompt_config.label, } diff --git a/synth/validator/prompt_config.py b/synth/validator/prompt_config.py index 2e59bdf4..a9eb1d1f 100644 --- a/synth/validator/prompt_config.py +++ b/synth/validator/prompt_config.py @@ -13,6 +13,7 @@ class PromptConfig: window_days: int softmax_beta: float smoothed_score_coefficient: float + num_simulations: int = 1000 LOW_FREQUENCY = PromptConfig( From d5924386856d998fb4ab72ab08f5076610679ec9 Mon Sep 17 00:00:00 2001 From: Thykof Date: Fri, 28 Nov 2025 16:20:12 +0100 Subject: [PATCH 20/24] hardcode parameters unless softmaxbeta for 1h prompt --- Makefile | 1 - docs/validator_guide.md | 11 ++++++----- 
entrypoint-validator.sh | 3 +-- neurons/validator.py | 1 + synth/utils/config.py | 2 +- synth/validator/prompt_config.py | 2 +- validator.config.js | 2 +- validator.test.config.js | 2 +- 8 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Makefile b/Makefile index ab47e899..2e24efce 100644 --- a/Makefile +++ b/Makefile @@ -15,7 +15,6 @@ validator_coldkey_name = validator-base validator_hotkey_name = default ewma_window_days = 10 -ewma_cutoff_days = 10 # Python virtual environment venv_python=bt_venv/bin/python3 diff --git a/docs/validator_guide.md b/docs/validator_guide.md index ca096808..dd3693fc 100644 --- a/docs/validator_guide.md +++ b/docs/validator_guide.md @@ -9,7 +9,8 @@ - [5. Options](#5-options) - [5.1. Common Options](#51-common-options) - [`--axon.port INTEGER`](#--axonport-integer) - - [`--ewma.window_days INTEGER`](#--ewmawindow_days-float) + - [`--ewma.window_days INTEGER`](#--ewmawindow_days-integer) + - [`--softmax.beta FLOAT`](#--softmaxbeta-float) - [`--logging.debug`](#--loggingdebug) - [`--logging.info`](#--logginginfo) - [`--logging.trace`](#--loggingtrace) @@ -317,9 +318,9 @@ pm2 start validator.config.js -- --ewma.window_days 10 #### `--softmax.beta FLOAT` -Negative beta to give higher weight to lower scores. 
+Negative beta to give higher weight to lower scores for the 1h prompt -Default: `-0.002` +Default: `-0.05` Example: @@ -331,7 +332,7 @@ module.exports = { name: "validator", interpreter: "python3", script: "./neurons/validator.py", - args: "--softmax.beta -0.003", + args: "--softmax.beta -0.05", env: { PYTHONPATH: ".", }, @@ -343,7 +344,7 @@ module.exports = { Alternatively, you can add the args directly to the command: ```shell -pm2 start validator.config.js -- --softmax.beta -0.003 +pm2 start validator.config.js -- --softmax.beta -0.05 ``` [Back to top ^][table-of-contents] diff --git a/entrypoint-validator.sh b/entrypoint-validator.sh index da3bee25..d9e4b2bb 100644 --- a/entrypoint-validator.sh +++ b/entrypoint-validator.sh @@ -10,8 +10,7 @@ validator_coldkey_name=validator validator_hotkey_name=default ewma_window_days=10 -ewma_cutoff_days=10 -softmax_beta=-0.1 +softmax_beta=-0.05 log_id_prefix=my_validator_name diff --git a/neurons/validator.py b/neurons/validator.py index 7dab5f14..1b39695d 100644 --- a/neurons/validator.py +++ b/neurons/validator.py @@ -75,6 +75,7 @@ def __init__(self, config=None): self.scheduler = sched.scheduler(time.time, time.sleep) self.miner_uids: list[int] = [] self.asset_list = ["BTC", "ETH", "XAU", "SOL"] + HIGH_FREQUENCY.softmax_beta = self.config.softmax.beta self.assert_assets_supported() diff --git a/synth/utils/config.py b/synth/utils/config.py index 5e5e4904..f23d52d4 100644 --- a/synth/utils/config.py +++ b/synth/utils/config.py @@ -242,7 +242,7 @@ def add_validator_args(_, parser: argparse.ArgumentParser): "--softmax.beta", type=float, help="Negative beta to give higher weight to lower scores.", - default=-0.1, + default=-0.05, ) diff --git a/synth/validator/prompt_config.py b/synth/validator/prompt_config.py index a9eb1d1f..8923ecf7 100644 --- a/synth/validator/prompt_config.py +++ b/synth/validator/prompt_config.py @@ -62,6 +62,6 @@ class PromptConfig: "0_60min_gaps": 3600, }, window_days=1, - softmax_beta=-0.1, + 
softmax_beta=-0.05, smoothed_score_coefficient=0.5, ) diff --git a/validator.config.js b/validator.config.js index 4aadc193..31e20a6d 100644 --- a/validator.config.js +++ b/validator.config.js @@ -4,7 +4,7 @@ module.exports = { name: "validator", interpreter: "python3", script: "./neurons/validator.py", - args: "--netuid 50 --logging.debug --wallet.name validator --wallet.hotkey default --neuron.axon_off true --neuron.vpermit_tao_limit 999999 --ewma.window_days 10 --softmax.beta -0.1", + args: "--netuid 50 --logging.debug --wallet.name validator --wallet.hotkey default --neuron.axon_off true --neuron.vpermit_tao_limit 999999 --ewma.window_days 10 --softmax.beta -0.05", env: { PYTHONPATH: ".", }, diff --git a/validator.test.config.js b/validator.test.config.js index 9b03deff..a5dbe06b 100644 --- a/validator.test.config.js +++ b/validator.test.config.js @@ -4,7 +4,7 @@ module.exports = { name: "validator", interpreter: "python3", script: "./neurons/validator.py", - args: "--netuid 247 --logging.debug --logging.trace --subtensor.network test --wallet.name validator --wallet.hotkey default --neuron.axon_off true --ewma.window_days 10 --softmax.beta -0.1", + args: "--netuid 247 --logging.debug --logging.trace --subtensor.network test --wallet.name validator --wallet.hotkey default --neuron.axon_off true --ewma.window_days 10 --softmax.beta -0.05", env: { PYTHONPATH: ".", }, From 3f399f8b25dd8e5768a61098eb0e19b19db1076d Mon Sep 17 00:00:00 2001 From: Thykof Date: Fri, 28 Nov 2025 16:29:03 +0100 Subject: [PATCH 21/24] Revert "skip score during cloudflare outage (#189)" This reverts commit 42bf3b1906722046321bfe90ec1e4c6599115758. 
--- synth/validator/miner_data_handler.py | 1 - synth/validator/moving_average.py | 9 --------- 2 files changed, 10 deletions(-) diff --git a/synth/validator/miner_data_handler.py b/synth/validator/miner_data_handler.py index f78b597f..84af0f6c 100644 --- a/synth/validator/miner_data_handler.py +++ b/synth/validator/miner_data_handler.py @@ -493,7 +493,6 @@ def get_miner_scores( MinerScore.scored_time, MinerScore.score_details_v3, ValidatorRequest.asset, - ValidatorRequest.start_time, ) .select_from(MinerScore) .join( diff --git a/synth/validator/moving_average.py b/synth/validator/moving_average.py index dd11e322..bf0da9cd 100644 --- a/synth/validator/moving_average.py +++ b/synth/validator/moving_average.py @@ -17,15 +17,6 @@ def prepare_df_for_moving_average(df): df = df.copy() df["scored_time"] = pd.to_datetime(df["scored_time"]) - # 0) Temporary exclude a period - df["start_time"] = pd.to_datetime(df["start_time"]) - exclude_start = datetime.fromisoformat("2025-11-18 11:53:00+00:00") - exclude_end = datetime.fromisoformat("2025-11-18 14:08:00+00:00") - mask_exclude = (df["start_time"] >= exclude_start) & ( - df["start_time"] <= exclude_end - ) - df = df.loc[~mask_exclude] - # 1) compute globals global_min = df["scored_time"].min() all_times = sorted(df["scored_time"].unique()) From 46c7b1f8723308634489ae92246ed11079a7e7cc Mon Sep 17 00:00:00 2001 From: Thykof Date: Mon, 1 Dec 2025 13:20:42 +0100 Subject: [PATCH 22/24] fix forward tests --- tests/test_forward.py | 34 ++++++++++------------------------ 1 file changed, 10 insertions(+), 24 deletions(-) diff --git a/tests/test_forward.py b/tests/test_forward.py index 95ae43a4..fd45ed4c 100644 --- a/tests/test_forward.py +++ b/tests/test_forward.py @@ -33,14 +33,12 @@ def test_calculate_rewards_and_update_scores(db_engine: Engine): miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - prompt=prompt_config.HIGH_FREQUENCY, + prompt=prompt_config.LOW_FREQUENCY, ) assert 
success - miner_scores_df = handler.get_miner_scores( - scored_time=scored_time, prompt=prompt_config.HIGH_FREQUENCY - ) + miner_scores_df = handler.get_miner_scores(scored_time, 10) assert len(miner_scores_df) == len(miner_uids) @@ -59,7 +57,7 @@ def test_calculate_moving_average_and_update_rewards(db_engine: Engine): miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - prompt=prompt_config.HIGH_FREQUENCY, + prompt=prompt_config.LOW_FREQUENCY, ) assert success @@ -151,14 +149,10 @@ def test_calculate_moving_average_and_update_rewards_new_miner( miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - prompt=prompt_config.HIGH_FREQUENCY, + prompt=prompt_config.LOW_FREQUENCY, ) - miner_scores_df = handler.get_miner_scores( - scored_time=scored_time, - window_days=prompt_config.HIGH_FREQUENCY.window_days, - time_length=prompt_config.HIGH_FREQUENCY.time_length, - ) + miner_scores_df = handler.get_miner_scores(scored_time, 10) print("miner_scores_df", miner_scores_df) @@ -276,14 +270,10 @@ def test_calculate_moving_average_and_update_rewards_new_miner_registration( miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - prompt=prompt_config.HIGH_FREQUENCY, + prompt=prompt_config.LOW_FREQUENCY, ) - miner_scores_df = handler.get_miner_scores( - scored_time=scored_time, - window_days=prompt_config.HIGH_FREQUENCY.window_days, - time_length=prompt_config.HIGH_FREQUENCY.time_length, - ) + miner_scores_df = handler.get_miner_scores(scored_time, 10) print("miner_scores_df: ", miner_scores_df) @@ -310,7 +300,7 @@ def test_calculate_moving_average_and_update_rewards_new_miner_registration( miner_weights = [ item["reward_weight"] for item in moving_averages_data ] - assert_almost_equal(sum(miner_weights), 1, decimal=12) + assert_almost_equal(sum(miner_weights), 0.5, decimal=12) def test_calculate_moving_average_and_update_rewards_only_invalid( @@ -394,14 
+384,10 @@ def test_calculate_moving_average_and_update_rewards_only_invalid( miner_data_handler=handler, price_data_provider=price_data_provider, scored_time=scored_time, - prompt=prompt_config.HIGH_FREQUENCY, + prompt=prompt_config.LOW_FREQUENCY, ) - miner_scores_df = handler.get_miner_scores( - scored_time, - prompt_config.HIGH_FREQUENCY.window_days, - prompt_config.HIGH_FREQUENCY.time_length, - ) + miner_scores_df = handler.get_miner_scores(scored_time, 10) print("miner_scores_df", miner_scores_df) From 0168c7a7c6b1861d30d9f807a32ac821b058e9a3 Mon Sep 17 00:00:00 2001 From: Thykof Date: Mon, 1 Dec 2025 18:31:18 +0100 Subject: [PATCH 23/24] fix migration default value --- .../versions/2b28a1b95303_add_column_rewards_prompt_name.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/alembic/versions/2b28a1b95303_add_column_rewards_prompt_name.py b/alembic/versions/2b28a1b95303_add_column_rewards_prompt_name.py index 0bc5b172..d0c8c2f2 100644 --- a/alembic/versions/2b28a1b95303_add_column_rewards_prompt_name.py +++ b/alembic/versions/2b28a1b95303_add_column_rewards_prompt_name.py @@ -21,9 +21,9 @@ def upgrade() -> None: op.add_column( - "miner_rewards", sa.Column("prompt_name", sa.Text, nullable=True) + "miner_rewards", + sa.Column("prompt_name", sa.Text, server_default="low"), ) - op.execute("UPDATE miner_rewards SET prompt_name='low'") def downgrade() -> None: From 2ff2237ead1122177c638dd6b58a2e3d1910f4d0 Mon Sep 17 00:00:00 2001 From: Thykof Date: Mon, 1 Dec 2025 20:34:21 +0100 Subject: [PATCH 24/24] schedule HFT --- neurons/validator.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/neurons/validator.py b/neurons/validator.py index 1b39695d..aff415a9 100644 --- a/neurons/validator.py +++ b/neurons/validator.py @@ -1,7 +1,7 @@ # The MIT License (MIT) # Copyright © 2023 Yuma Rao # Copyright © 2023 Mode Labs -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import 
multiprocessing as mp import sched import time @@ -124,6 +124,16 @@ def schedule_cycle( ) delay = (next_cycle - get_current_time()).total_seconds() + # Schedule the launch of high frequency prompt + high_frequency_launch = datetime( + 2025, 12, 2, 18, 0, 0, tzinfo=timezone.utc + ) + if ( + prompt_config.label == HIGH_FREQUENCY.label + and get_current_time() <= high_frequency_launch + ): + return + bt.logging.info( f"Scheduling next {prompt_config.label} frequency cycle for asset {asset} in {delay} seconds" )