3 changes: 1 addition & 2 deletions README.md
@@ -262,15 +262,14 @@ $ nohup python neurons/validator.py --netuid 8 --wallet.name <wallet> --wallet.h

# Running a Miner

If you're running a miner, you should see three types of requests: LiveForwardHash, LiveForward, and LiveBackward.
If you're running a miner, you should see two types of requests: LiveForwardHash and LiveForward.

LiveForwardHash is requested first and asks for a hash of your predictions. LiveForward is requested 60 seconds later and asks
for the actual (non-hashed) predictions. Using the hash together with the revealed predictions, validators can verify the
authenticity of the predictions, ensuring no participant is copying another's.

- **LiveForwardHash** - asks you to provide a hash of your predictions.
- **LiveForward** - asks your miner to provide the actual predictions.
- **LiveBackward** - delivers the live results, which you can use for any updating purposes.

You'll receive rewards for your predictions `~10 hours` after making them. Therefore, if you start running on the network, you should expect a lag in receiving rewards. Predictions are reviewed and rewarded every 30 minutes.
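
To make the hash-then-reveal flow concrete, here is a minimal sketch of how a miner's commitment could be checked. It assumes a simple SHA-256 digest over the hotkey plus the stringified prediction list (the validator calls `HashingUtils.hash_predictions(miner_hotkey, str(predictions.tolist()))`, whose exact scheme may differ); the hotkey and prediction values below are made up for illustration.

```python
import hashlib

def hash_predictions(hotkey: str, predictions_str: str) -> str:
    # Assumed scheme: SHA-256 over hotkey + stringified predictions.
    # The actual HashingUtils.hash_predictions implementation may differ.
    return hashlib.sha256((hotkey + predictions_str).encode("utf-8")).hexdigest()

# Miner side: commit to the predictions by sending only their hash (LiveForwardHash).
predictions = [1.234, 1.236, 1.239]
committed_hash = hash_predictions("example_miner_hotkey", str(predictions))

# ~60 seconds later the miner reveals the raw predictions (LiveForward).
# Validator side: recompute the hash from the revealed values and compare.
revealed_hash = hash_predictions("example_miner_hotkey", str(predictions))
assert committed_hash == revealed_hash, "prediction reveal does not match earlier hash"
```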

21 changes: 3 additions & 18 deletions neurons/miner.py
@@ -96,13 +96,13 @@ def get_config():

# TODO: Move this into a stream mining solution class, so each stream can be predicted using different sources, models, etc...
def get_predictions(
tims_ms: int,
time_ms: int,
feature_source: FeatureSource,
feature_scaler: FeatureScaler,
model: BaseMiningModel,
):
# TODO: interval should come from the stream definition
lookback_time_ms = tims_ms - (model.sample_count * INTERVAL_MS)
lookback_time_ms = time_ms - (model.sample_count * INTERVAL_MS)

feature_samples = feature_source.get_feature_samples(
lookback_time_ms, INTERVAL_MS, model.sample_count
@@ -499,21 +499,6 @@ def live_f(synapse: template.protocol.LiveForward) -> template.protocol.LiveForw
except Exception as e:
bt.logging.error(f"error returning synapse to vali: {e}")

# def lb_blacklist_fn(synapse: template.protocol.LiveBackward) -> Tuple[bool, str]:
# # standardizing not accepting lb for now. Miner can override if they'd like.
# return False, synapse.dendrite.hotkey
#
# def lb_priority_fn(synapse: template.protocol.LiveBackward) -> float:
# caller_uid = metagraph.hotkeys.index( synapse.dendrite.hotkey ) # Get the caller index.
# prirority = float( metagraph.S[ caller_uid ] ) # Return the stake as the priority.
# bt.logging.trace(f'Prioritizing {synapse.dendrite.hotkey} with value: ', prirority)
# return prirority

# def live_b(synapse: template.protocol.LiveBackward) -> template.protocol.LiveBackward:
# bt.logging.debug(f'received lb with length {len(synapse.samples.numpy())}')
# synapse.received = True
# return synapse

# Build and link miner functions to the axon.
# The axon handles request processing, allowing validators to send this process requests.
bt.logging.info(f"setting port [{config.axon.port}]")
@@ -624,4 +609,4 @@ def live_f(synapse: template.protocol.LiveForward) -> template.protocol.LiveForw

# This is the main function, which runs the miner.
if __name__ == "__main__":
main(get_config())
main(get_config())
30 changes: 4 additions & 26 deletions neurons/validator.py
@@ -18,7 +18,6 @@
)
from template.protocol import (
LiveForward,
LiveBackward,
LiveForwardHash,
)
import time
@@ -190,7 +189,9 @@ def run_time_series_validation(
# check for invalid miner pred
for miner_pred in predictions:
if math.isnan(miner_pred):
raise ValueError(f"invalid miner preds [{miner_pred}]")
raise ValueError(
f"invalid miner preds [{miner_pred}]"
)

hashed_predictions = HashingUtils.hash_predictions(
miner_hotkey, str(predictions.tolist())
@@ -287,30 +288,7 @@ def run_time_series_validation(
# TODO: Improve validators to allow multiple features in predictions
validation_array = validation_array.flatten()

bt.logging.info(
"results gathered sending back to miners via backprop and weighing"
)

# Send back the results for backprop so miners can learn
results = bt.tensor(validation_array)

results_backprop_proto = LiveBackward(
request_uuid=request_uuid,
stream_id=stream_type,
samples=results,
topic_id=request_df.topic_id,
)

try:
dendrite.query(
metagraph.axons, results_backprop_proto, deserialize=True
)
bt.logging.info("live results sent back to miners")
except Exception: # noqa
traceback.print_exc()
bt.logging.info(
"failed sending back results to miners and continuing..."
)
bt.logging.info("results gathered sending back to miners via weighing")

scores = {}
for miner_uid, miner_preds in vali_request.predictions.items():