from concurrent.futures import ThreadPoolExecutor, TimeoutError as FuturesTimeoutError
from dataclasses import dataclass
from datetime import datetime, timedelta
from enum import Enum
from math import ceil
from typing import TypeAlias

from fiber.chain.commitments import _deserialize_commitment_field
from fiber.chain.interface import get_substrate
from fiber.chain.metagraph import Metagraph
from fiber.chain.models import Node
from substrateinterface.storage import StorageKey

from network_commitments import Decoder
from wandb_data import TIMEZONE, Hotkey, Uid

Weight: TypeAlias = float
Incentive: TypeAlias = float


class ContestId(Enum):
    SDXL_APPLE_SILICON = 0
    SDXL_NEWDREAM_NVIDIA_4090 = 1
    FLUX_NVIDIA_4090 = 2


@dataclass
class Commitment:
    provider: str
    repository: str
    revision: str
    contest: ContestId
    block: int

    @classmethod
    def decode(cls, decoder: Decoder, block: int):
        provider = decoder.read_str()
        repository = decoder.read_str()
        revision = decoder.read_sized_str(7)
        contest_id = ContestId(decoder.read_uint16())

        return cls(
            provider=provider,
            repository=repository,
            revision=revision,
            contest=contest_id,
            block=block,
        )

    def get_repo_link(self):
        return f"https://{self.provider}/{self.repository}"


SPEC_VERSION = 7
NET_UID = 39

# Caches populated by the fetch_* functions below, keyed by hotkey (or UID).
WEIGHTS_BY_MINER: dict[Hotkey, list[tuple[Hotkey, Weight]]] = {}
VALIDATOR_IDENTITIES: dict[Hotkey, str] = {}
COMMITMENTS: dict[Hotkey, Commitment] = {}
UPDATED: dict[Hotkey, int] = {}
UIDS_BY_HOTKEY: dict[Hotkey, Uid] = {}
HOTKEYS_BY_UID: dict[Uid, Hotkey] = {}

substrate = get_substrate()
metagraph = Metagraph(substrate, netuid=NET_UID, load_old_nodes=False)


def query_subtensor(storage_keys: list[StorageKey], block: int) -> list:
    global substrate
    try:
        return substrate.query_multi(
            storage_keys=storage_keys,
            block_hash=substrate.get_block_hash(block),
        )
    except Exception:
        # Reconnect so the next query gets a fresh substrate connection, then re-raise.
        substrate = get_substrate()
        raise


def is_validator(node: Node) -> bool:
    return node.vtrust > 0 or node.stake > 10_000


def get_nodes() -> dict[Hotkey, Node]:
    return metagraph.nodes


def fetch_weights(block: int):
    WEIGHTS_BY_MINER.clear()
    storage_keys: list[StorageKey] = []
    for hotkey, node in metagraph.nodes.items():
        if not is_validator(node):
            continue
        storage_keys.append(substrate.create_storage_key(
            "SubtensorModule",
            "Weights",
            [metagraph.netuid, UIDS_BY_HOTKEY[node.hotkey]],
        ))

    weights = query_subtensor(storage_keys, block)

    for hotkey, node in metagraph.nodes.items():
        for storage, validator_weights in weights:
            validator_hotkey = HOTKEYS_BY_UID[storage.params[1]]
            if hotkey not in WEIGHTS_BY_MINER:
                WEIGHTS_BY_MINER[hotkey] = []

            weight = 0.0
            for miner_weight in validator_weights:
                if miner_weight[0].value == UIDS_BY_HOTKEY[hotkey]:
                    # On-chain weights are stored as u16 values; normalize to [0, 1].
                    weight = miner_weight[1].value / 2 ** 16
                    break

            WEIGHTS_BY_MINER[hotkey].append((validator_hotkey, weight))


def fetch_updated(block: int):
    UPDATED.clear()
    for hotkey, node in metagraph.nodes.items():
        UPDATED[hotkey] = ceil(block - node.last_updated)


def fetch_identities(block: int):
    VALIDATOR_IDENTITIES.clear()
    storage_keys: list[StorageKey] = []
    for hotkey, node in metagraph.nodes.items():
        if not is_validator(node):
            continue
        storage_keys.append(substrate.create_storage_key(
            "SubtensorModule",
            "Identities",
            [node.coldkey],
        ))

    identities = query_subtensor(storage_keys, block)

    for hotkey, node in metagraph.nodes.items():
        for storage, info in identities:
            if node.coldkey != storage.params[0]:
                continue
            if info is not None:
                VALIDATOR_IDENTITIES[hotkey] = info.value["name"]
            break


def fetch_commitments(block: int):
    COMMITMENTS.clear()
    storage_keys: list[StorageKey] = []
    for hotkey, node in metagraph.nodes.items():
        if is_validator(node):
            continue
        storage_keys.append(substrate.create_storage_key(
            "Commitments",
            "CommitmentOf",
            [metagraph.netuid, hotkey],
        ))

    commitments = query_subtensor(storage_keys, block)
    for storage, commitment in commitments:
        try:
            if not commitment or not commitment.value:
                continue
            fields = commitment.value["info"]["fields"]
            if not fields:
                continue

            field = _deserialize_commitment_field(fields[0])
            if field is None:
                continue

            decoder = Decoder(field[1])
            spec_version = decoder.read_uint16()
            if spec_version != SPEC_VERSION:
                continue

            COMMITMENTS[storage.params[1]] = Commitment.decode(decoder, int(commitment.value["block"]))
        except Exception:
            continue


last_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)
last_identity_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)
last_commitment_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)


def sync_metagraph(timeout: int = 10):
    """Sync the metagraph in a worker thread, rate-limited to once every 5 minutes."""
    global substrate
    global last_sync
    now = datetime.now(TIMEZONE)
    if now - last_sync < timedelta(minutes=5):
        return
    last_sync = now

    def sync_task():
        print("Syncing metagraph...")
        block = substrate.get_block_number(None)  # type: ignore
        metagraph.sync_nodes()
        for uid, node in enumerate(metagraph.nodes.values()):
            UIDS_BY_HOTKEY[node.hotkey] = uid
            HOTKEYS_BY_UID[uid] = node.hotkey

        fetch_weights(block)
        fetch_updated(block)

        global last_identity_sync
        if now - last_identity_sync > timedelta(days=1):
            print("Syncing identities...")
            last_identity_sync = now
            fetch_identities(block)

        global last_commitment_sync
        if now - last_commitment_sync > timedelta(hours=12):
            print("Syncing commitments...")
            last_commitment_sync = now
            fetch_commitments(block)

    with ThreadPoolExecutor(max_workers=1) as executor:
        future = executor.submit(sync_task)
        try:
            future.result(timeout=timeout)
        except FuturesTimeoutError:
            print("Timed out while syncing metagraph")
        except Exception as e:
            print(f"Error occurred while syncing metagraph: {e}")
            substrate = get_substrate()
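

# Minimal usage sketch (an illustrative assumption, not part of the original module):
# trigger a sync and inspect the per-miner validator weights collected above.
if __name__ == "__main__":
    sync_metagraph()
    for miner_hotkey, validator_weights in WEIGHTS_BY_MINER.items():
        print(miner_hotkey, validator_weights)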