Spaces:
Running
Running
File size: 6,688 Bytes
872d532 8cdce17 6c858ba 8cdce17 4ebccb1 6c858ba dfdce18 4ebccb1 6c858ba 8cdce17 dfdce18 6c858ba 8cdce17 6c858ba 4ebccb1 8cdce17 4ebccb1 6c858ba 4ebccb1 6c858ba 4ebccb1 6c858ba 4ebccb1 6c858ba 4ebccb1 6c858ba 4ebccb1 6c858ba 84168a6 4ebccb1 6c858ba 84168a6 872d532 6c858ba 4ebccb1 6c858ba 4ebccb1 6c858ba 4ebccb1 6c858ba 8cdce17 21a364f 8cdce17 6c858ba 8cdce17 6c858ba 872d532 c656c20 872d532 84168a6 872d532 84168a6 4ebccb1 84168a6 4ebccb1 872d532 84168a6 8cdce17 872d532 c656c20 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 |
from concurrent.futures import ThreadPoolExecutor, TimeoutError as FuturesTimeoutError
from dataclasses import dataclass
from datetime import datetime, timedelta
from enum import Enum
from math import ceil
from typing import TypeAlias

from fiber.chain.commitments import _deserialize_commitment_field
from fiber.chain.interface import get_substrate
from fiber.chain.metagraph import Metagraph
from fiber.chain.models import Node
from substrateinterface.storage import StorageKey

from network_commitments import Decoder
from wandb_data import TIMEZONE, Hotkey, Uid
# Aliases for chain-derived floats: a validator-assigned weight (normalized from
# u16 fixed-point in fetch_weights) and an incentive score.
Weight: TypeAlias = float
Incentive: TypeAlias = float  # NOTE(review): not referenced in this chunk — presumably used by callers
class ContestId(Enum):
    """On-chain identifier of a contest, decoded from miner commitments (see Commitment.decode)."""
    # Values are wire-format constants read from the chain — never renumber existing members.
    SDXL_APPLE_SILICON = 0
    SDXL_NEWDREAM_NVIDIA_4090 = 1
    FLUX_NVIDIA_4090 = 2
@dataclass
class Commitment:
    """A miner's decoded on-chain commitment: where the submission lives and which contest it targets."""

    provider: str      # host of the repository, e.g. a git forge domain
    repository: str    # "<owner>/<name>" path on the provider
    revision: str      # fixed-width (7 chars) revision identifier
    contest: ContestId
    block: int         # block at which the commitment was made

    @classmethod
    def decode(cls, decoder: Decoder, block: int):
        """Build a Commitment by reading fields from *decoder* in wire order:
        provider, repository, 7-character revision, then the contest id."""
        return cls(
            provider=decoder.read_str(),
            repository=decoder.read_str(),
            revision=decoder.read_sized_str(7),
            contest=ContestId(decoder.read_uint16()),
            block=block,
        )

    def get_repo_link(self):
        """Return the https URL of the committed repository."""
        return f"https://{self.provider}/{self.repository}"
# Commitment payload spec version accepted by fetch_commitments; others are ignored.
SPEC_VERSION = 7
# Bittensor subnet netuid this module tracks.
NET_UID = 39
# Module-level caches, rebuilt in place by the fetch_*/sync functions below.
# miner hotkey -> [(validator hotkey, weight that validator assigned to the miner)]
WEIGHTS_BY_MINER: dict[Hotkey, list[tuple[Hotkey, Weight]]] = {}
# validator hotkey -> on-chain identity name
VALIDATOR_IDENTITIES: dict[Hotkey, str] = {}
# miner hotkey -> decoded Commitment
COMMITMENTS: dict[Hotkey, Commitment] = {}
# hotkey -> blocks elapsed since the node's last_updated
UPDATED: dict[Hotkey, int] = {}
# Bidirectional uid/hotkey lookup tables, filled by sync_metagraph.
UIDS_BY_HOTKEY: dict[Hotkey, Uid] = {}
HOTKEYS_BY_UID: dict[Uid, Hotkey] = {}
# Shared chain connection and metagraph view; substrate is replaced on failure
# (see query_subtensor / sync_metagraph).
substrate = get_substrate()
metagraph = Metagraph(substrate, netuid=NET_UID, load_old_nodes=False)
def query_subtensor(storage_keys: list[StorageKey], block: int) -> list:
    """Run a batched storage query against the chain at the given block.

    On any failure the shared substrate connection is replaced with a fresh
    one before the exception is re-raised, so the next caller starts from a
    clean connection.
    """
    global substrate
    try:
        block_hash = substrate.get_block_hash(block)
        return substrate.query_multi(storage_keys=storage_keys, block_hash=block_hash)
    except Exception:
        substrate = get_substrate()
        raise
def is_validator(node: Node) -> bool:
    """Treat a node as a validator if it has nonzero validator trust, or failing that, more than 10k stake."""
    if node.vtrust > 0:
        return True
    return node.stake > 10_000
def get_nodes() -> dict[Hotkey, Node]:
    """Expose the live node table of the shared metagraph (the dict itself, not a copy)."""
    nodes = metagraph.nodes
    return nodes
def fetch_weights(block: int):
    """Rebuild WEIGHTS_BY_MINER: for every node hotkey, the weight each
    validator assigned to it at *block*, as (validator_hotkey, weight) pairs.

    On-chain weights are u16 fixed-point; they are normalized by 2**16 here.
    """
    WEIGHTS_BY_MINER.clear()
    storage_keys: list[StorageKey] = []
    for node in metagraph.nodes.values():
        if not is_validator(node):
            continue
        storage_keys.append(substrate.create_storage_key(
            "SubtensorModule",
            "Weights",
            [metagraph.netuid, UIDS_BY_HOTKEY[node.hotkey]]
        ))
    weights = query_subtensor(storage_keys, block)

    # Invert each validator's weight list into a uid -> weight mapping once,
    # instead of rescanning every list for every miner
    # (was O(miners * validators * weights-per-validator)).
    weights_by_validator: list[tuple[Hotkey, dict[Uid, Weight]]] = []
    for storage, validator_weights in weights:
        validator_hotkey = HOTKEYS_BY_UID[storage.params[1]]
        weight_by_uid: dict[Uid, Weight] = {}
        for miner_weight in validator_weights:
            uid = miner_weight[0].value
            # First entry per uid wins, matching the original first-match scan.
            if uid not in weight_by_uid:
                weight_by_uid[uid] = miner_weight[1].value / 2 ** 16
        weights_by_validator.append((validator_hotkey, weight_by_uid))

    # Original behavior: when no validator weights were returned, no per-miner
    # entries are created at all.
    if not weights_by_validator:
        return

    for hotkey in metagraph.nodes:
        uid = UIDS_BY_HOTKEY[hotkey]
        WEIGHTS_BY_MINER[hotkey] = [
            (validator_hotkey, weight_by_uid.get(uid, 0.0))
            for validator_hotkey, weight_by_uid in weights_by_validator
        ]
def fetch_updated(block: int):
    """Refresh UPDATED with, per hotkey, the number of blocks elapsed since the node's last_updated."""
    UPDATED.clear()
    UPDATED.update({
        hotkey: ceil(block - node.last_updated)
        for hotkey, node in metagraph.nodes.items()
    })
def fetch_identities(block: int):
    """Refresh VALIDATOR_IDENTITIES with each validator's on-chain identity name,
    looked up by the coldkey of the validator's node."""
    VALIDATOR_IDENTITIES.clear()
    storage_keys: list[StorageKey] = []
    for node in metagraph.nodes.values():
        if not is_validator(node):
            continue
        storage_keys.append(substrate.create_storage_key(
            "SubtensorModule",
            "Identities",
            [node.coldkey]
        ))
    identities = query_subtensor(storage_keys, block)

    # Index results by coldkey once (first result per coldkey wins, as in the
    # original scan) instead of rescanning all results for every node: O(V^2) -> O(V).
    info_by_coldkey = {}
    for storage, info in identities:
        info_by_coldkey.setdefault(storage.params[0], info)

    for hotkey, node in metagraph.nodes.items():
        info = info_by_coldkey.get(node.coldkey)
        # Deliberate value-equality (not `is`): substrate result objects may
        # compare equal to None when their wrapped value is None.
        if info != None:  # noqa
            VALIDATOR_IDENTITIES[hotkey] = info.value["name"]
def fetch_commitments(block: int):
    """Refresh COMMITMENTS with each miner's decoded on-chain commitment at *block*.

    Best-effort: commitments that are empty, undecodable, or carry a spec
    version other than SPEC_VERSION are skipped silently. Validators are
    skipped entirely — only miner commitments are tracked.
    """
    COMMITMENTS.clear()
    storage_keys: list[StorageKey] = []
    for hotkey, node in metagraph.nodes.items():
        if is_validator(node): continue
        storage_keys.append(substrate.create_storage_key(
            "Commitments",
            "CommitmentOf",
            [metagraph.netuid, hotkey]
        ))
    commitments = query_subtensor(storage_keys, block)
    for storage, commitment in commitments:
        try:
            if not commitment or not commitment.value:
                continue
            fields = commitment.value["info"]["fields"]
            if not fields:
                continue
            field = _deserialize_commitment_field(fields[0])
            if field is None:
                continue
            decoder = Decoder(field[1])
            spec_version = decoder.read_uint16()
            if spec_version != SPEC_VERSION:
                continue
            COMMITMENTS[storage.params[1]] = Commitment.decode(decoder, int(commitment.value["block"]))
        except Exception:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt and
            # SystemExit; a malformed commitment should only skip that miner.
            continue
# Last-successful-sync timestamps, epoch-initialized so the first
# sync_metagraph() call performs every fetch unconditionally.
last_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)
last_identity_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)
last_commitment_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)
def sync_metagraph(timeout: int = 10):
    """Sync node/weight/identity/commitment caches from the chain.

    Rate-limited to once every 5 minutes; identities refresh at most daily and
    commitments at most every 12 hours. The work runs in a worker thread and is
    abandoned (logged) after *timeout* seconds. On any other failure the shared
    substrate connection is recreated.
    """
    global substrate
    global last_sync
    now = datetime.now(TIMEZONE)
    if now - last_sync < timedelta(minutes=5):
        return
    last_sync = now

    def sync_task():
        print("Syncing metagraph...")
        block = substrate.get_block_number(None)  # type: ignore
        metagraph.sync_nodes()
        # Rebuild the uid<->hotkey tables; uid is the node's position in the metagraph.
        for uid, node in enumerate(metagraph.nodes.values()):
            UIDS_BY_HOTKEY[node.hotkey] = uid
            HOTKEYS_BY_UID[uid] = node.hotkey
        fetch_weights(block)
        fetch_updated(block)
        global last_identity_sync
        if now - last_identity_sync > timedelta(days=1):
            print("Syncing identities...")
            last_identity_sync = now
            fetch_identities(block)
        global last_commitment_sync
        if now - last_commitment_sync > timedelta(hours=12):
            print("Syncing commitments...")
            last_commitment_sync = now
            fetch_commitments(block)

    # NOTE(review): ThreadPoolExecutor.__exit__ waits for sync_task to finish,
    # so a timed-out sync still blocks here until the task actually completes.
    with ThreadPoolExecutor(max_workers=1) as executor:
        future = executor.submit(sync_task)
        try:
            future.result(timeout=timeout)
        except (TimeoutError, FuturesTimeoutError):
            # On Python < 3.11, Future.result raises concurrent.futures.TimeoutError,
            # which is NOT the builtin TimeoutError — catch both so the timeout
            # branch fires on every supported version (they are the same class on 3.11+).
            print("Timed out while syncing metagraph")
        except Exception as e:
            print(f"Error occurred while syncing metagraph: {e}")
            substrate = get_substrate()
|