from concurrent.futures import ThreadPoolExecutor
# On Python < 3.11, concurrent.futures.TimeoutError is not the built-in TimeoutError,
# so import it explicitly for the except clause in sync_metagraph.
from concurrent.futures import TimeoutError as FuturesTimeoutError
from datetime import datetime, timedelta
from math import ceil
from typing import TypeAlias

from fiber.chain.interface import get_substrate
from fiber.chain.metagraph import Metagraph
from fiber.chain.models import Node
from substrateinterface.storage import StorageKey

from wandb_data import Hotkey, Uid, TIMEZONE
Weight: TypeAlias = float
Incentive: TypeAlias = float

NET_UID = 39

# Per-miner list of (validator hotkey, weight that validator assigned to the miner).
WEIGHTS_BY_MINER: dict[Hotkey, list[tuple[Hotkey, Weight]]] = {}
# Validator hotkey -> on-chain identity name.
VALIDATOR_IDENTITIES: dict[Hotkey, str] = {}
# Hotkey -> blocks since the node's last update (see fetch_updated).
UPDATED: dict[Hotkey, int] = {}
UIDS_BY_HOTKEY: dict[Hotkey, Uid] = {}
HOTKEYS_BY_UID: dict[Uid, Hotkey] = {}

substrate = get_substrate()
metagraph = Metagraph(substrate, netuid=NET_UID, load_old_nodes=False)
def query_subtensor(storage_keys: list[StorageKey], block: int) -> list:
    """Query multiple storage keys at the given block, reconnecting on failure."""
    global substrate
    try:
        return substrate.query_multi(
            storage_keys=storage_keys,
            block_hash=substrate.get_block_hash(block),
        )
    except Exception:
        # Recreate the substrate connection before re-raising so the next call starts clean.
        substrate = get_substrate()
        raise
def is_validator(node: Node) -> bool:
    return node.vtrust > 0 or node.stake > 10_000
def get_nodes() -> dict[Hotkey, Node]:
    return metagraph.nodes
def fetch_weights(block: int):
    WEIGHTS_BY_MINER.clear()

    # Build one storage key per validator for the on-chain Weights map.
    storage_keys: list[StorageKey] = []
    for hotkey, node in metagraph.nodes.items():
        if not is_validator(node):
            continue
        storage_keys.append(substrate.create_storage_key(
            "SubtensorModule",
            "Weights",
            [metagraph.netuid, UIDS_BY_HOTKEY[node.hotkey]],
        ))

    weights = query_subtensor(storage_keys, block)

    # For every miner, record the weight each validator assigned to it.
    for hotkey, node in metagraph.nodes.items():
        for storage, validator_weights in weights:
            validator_hotkey = HOTKEYS_BY_UID[storage.params[1]]
            if hotkey not in WEIGHTS_BY_MINER:
                WEIGHTS_BY_MINER[hotkey] = []
            weight = 0.0
            for miner_weight in validator_weights:
                if miner_weight[0].value == UIDS_BY_HOTKEY[hotkey]:
                    # Scale the raw 16-bit weight value into [0, 1].
                    weight = miner_weight[1].value / 2 ** 16
                    break
            WEIGHTS_BY_MINER[hotkey].append((validator_hotkey, weight))
def fetch_updated(block: int):
    UPDATED.clear()
    for hotkey, node in metagraph.nodes.items():
        UPDATED[hotkey] = ceil(block - node.last_updated)
def fetch_identities(block: int):
    VALIDATOR_IDENTITIES.clear()

    # Identities are keyed by coldkey, so query one storage key per validator coldkey.
    storage_keys: list[StorageKey] = []
    for hotkey, node in metagraph.nodes.items():
        if not is_validator(node):
            continue
        storage_keys.append(substrate.create_storage_key(
            "SubtensorModule",
            "Identities",
            [node.coldkey],
        ))

    identities = query_subtensor(storage_keys, block)

    for hotkey, node in metagraph.nodes.items():
        for storage, info in identities:
            if node.coldkey != storage.params[0]:
                continue
            if info is not None:
                VALIDATOR_IDENTITIES[hotkey] = info.value["name"]
            break
last_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)
last_identity_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)
def sync_metagraph(timeout: int = 10):
    global last_sync
    now = datetime.now(TIMEZONE)

    # Rate-limit full syncs to once every five minutes.
    if now - last_sync < timedelta(minutes=5):
        return
    last_sync = now

    def sync_task():
        print("Syncing metagraph...")
        block = substrate.get_block_number(None)  # type: ignore
        metagraph.sync_nodes()
        for uid, node in enumerate(metagraph.nodes.values()):
            UIDS_BY_HOTKEY[node.hotkey] = uid
            HOTKEYS_BY_UID[uid] = node.hotkey

        fetch_weights(block)
        fetch_updated(block)

        # Identities change rarely; refresh them at most every twelve hours.
        global last_identity_sync
        if now - last_identity_sync < timedelta(hours=12):
            return
        last_identity_sync = now
        fetch_identities(block)

    with ThreadPoolExecutor(max_workers=1) as executor:
        future = executor.submit(sync_task)
        try:
            future.result(timeout=timeout)
        except FuturesTimeoutError:
            print("Timed out while syncing metagraph")
        except Exception as e:
            print(f"Error occurred while syncing metagraph: {e}")