import os
import traceback
from datetime import datetime, timedelta
from enum import Enum
from threading import Lock
from typing import TypeAlias
import requests
from cachetools import TTLCache, cached
from fiber import constants
from fiber.chain.interface import get_substrate
from fiber.chain.metagraph import Metagraph
from pydantic import BaseModel
from substrateinterface import SubstrateInterface
from substrateinterface.storage import StorageKey
from network_commitments import Decoder
from src import Key, Uid, TIMEZONE
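
# Taostats API key, read from the environment at import time.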
TAOSTATS_API_KEY = os.getenv("TAOSTATS_API_KEY")
Weight: TypeAlias = float
Incentive: TypeAlias = float
class ContestId(Enum):
FLUX_NVIDIA_4090 = 0
SDXL_NEWDREAM_NVIDIA_4090 = 1
class Neuron(BaseModel):
hotkey: str
coldkey: str
validator_trust: float
validator_permit: bool
incentive: float
updated: int
uid: int
block_number: int
class Commitment(BaseModel):
url: str
revision: str
contest: ContestId
block: int
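
    # Wire format (after the caller consumes the uint16 spec version):
    # provider str, repository str, 7-char revision, uint16 contest id.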
@classmethod
def decode(cls, decoder: Decoder, block: int):
provider = decoder.read_str()
repository = decoder.read_str()
revision = decoder.read_sized_str(7)
contest_id = ContestId(decoder.read_uint16())
return cls(
url=f"https://{provider}/{repository}",
revision=revision,
contest=contest_id,
block=block
)
SPEC_VERSION = 8
NET_UID = 39
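
# Module-level state populated by the sync routines below.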
WEIGHTS_BY_MINER: dict[Key, list[tuple[Key, Weight]]] = {}
VALIDATOR_IDENTITIES: dict[Key, str] = {}
HOTKEYS_BY_UID: dict[Uid, Key] = {}
substrate = get_substrate(subtensor_address=constants.FINNEY_SUBTENSOR_ADDRESS)
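
# Batched storage query pinned to a block; on failure, reconnect before re-raising
# so the next call starts with a fresh substrate connection.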
def query_subtensor(storage_keys: list[StorageKey], block: int) -> list:
global substrate
try:
return substrate.query_multi(
storage_keys=storage_keys,
block_hash=substrate.get_block_hash(block),
)
except Exception:
substrate = get_substrate(subtensor_address=substrate.url)
raise
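
# Rebuild WEIGHTS_BY_MINER: for each miner, record every validator's weight for it,
# normalized from the chain's u16 representation.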
def fetch_weights(block: int):
storage_keys: list[StorageKey] = []
for hotkey, neuron in get_neurons().items():
        if not neuron.validator_permit:
            continue
storage_keys.append(substrate.create_storage_key(
"SubtensorModule",
"Weights",
[NET_UID, neuron.uid]
))
weights = query_subtensor(storage_keys, block)
WEIGHTS_BY_MINER.clear()
for hotkey, neuron in get_neurons().items():
for storage, validator_weights in weights:
if not validator_weights:
continue
validator_hotkey = HOTKEYS_BY_UID[storage.params[1]]
if hotkey not in WEIGHTS_BY_MINER:
WEIGHTS_BY_MINER[hotkey] = []
weight = 0.0
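            # Find this validator's weight for the current miner's uid; the chain stores weights as u16.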
for miner_weight in validator_weights:
if miner_weight[0].value == neuron.uid:
weight = miner_weight[1].value / 2 ** 16
break
WEIGHTS_BY_MINER[hotkey].append((validator_hotkey, weight))
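
# Resolve each validator's on-chain identity name (keyed by coldkey) into VALIDATOR_IDENTITIES.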
def fetch_identities(block: int):
VALIDATOR_IDENTITIES.clear()
storage_keys: list[StorageKey] = []
for hotkey, neuron in get_neurons().items():
        if not neuron.validator_permit:
            continue
storage_keys.append(substrate.create_storage_key(
"SubtensorModule",
"Identities",
[neuron.coldkey]
))
identities = query_subtensor(storage_keys, block)
for hotkey, neuron in get_neurons().items():
for storage, info in identities:
            if neuron.coldkey != storage.params[0]:
                continue
            if info is not None:
VALIDATOR_IDENTITIES[hotkey] = info.value["name"]
break
lock = Lock()
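
# Commitments change rarely and the query touches every miner, so cache for 12 hours.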
@cached(cache=TTLCache(maxsize=1, ttl=12 * 60 * 60), lock=lock)
def fetch_commitments() -> dict[Key, Commitment]:
substrate = get_substrate(subtensor_address=constants.FINNEY_SUBTENSOR_ADDRESS)
block = substrate.get_block_number(None) # type: ignore
metagraph = Metagraph(substrate=substrate, netuid=NET_UID, load_old_nodes=False)
metagraph.sync_nodes()
    nodes = metagraph.nodes
print("Fetching commitments...")
commitments: dict[Key, Commitment] = {}
storage_keys: list[StorageKey] = []
for hotkey, node in nodes.items():
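        # Validators publish weights rather than commitments, so skip them.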
if node.vtrust > 0:
continue
storage_keys.append(substrate.create_storage_key(
"Commitments",
"CommitmentOf",
[NET_UID, hotkey]
))
results = substrate.query_multi(
storage_keys=storage_keys,
block_hash=substrate.get_block_hash(block),
)
for storage, commitment in results:
try:
if not commitment:
continue
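            # Unwrap the raw commitment bytes from the nested scale-decoded fields.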
field = bytes(next(iter(commitment["info"]["fields"][0][0].values()))[0])
decoder = Decoder(field)
spec_version = decoder.read_uint16()
if spec_version != SPEC_VERSION:
continue
commitments[storage.params[1]] = Commitment.decode(decoder, int(commitment["block"]))
        except Exception:
traceback.print_exc()
return commitments
last_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)
last_identity_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)
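
# Latest metagraph snapshot from the Taostats API, cached for two minutes.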
@cached(cache=TTLCache(maxsize=1, ttl=120))
def get_neurons() -> dict[Key, Neuron]:
response = requests.get(
"https://api.taostats.io/api/metagraph/latest/v1",
headers={
"accept": "application/json",
"Authorization": TAOSTATS_API_KEY,
},
params={
"netuid": 39,
}
)
response.raise_for_status()
return {
neuron["hotkey"]["ss58"]: Neuron(
hotkey=neuron["hotkey"]["ss58"],
coldkey=neuron["coldkey"]["ss58"],
validator_trust=float(neuron["validator_trust"]),
validator_permit=bool(neuron["validator_permit"]),
incentive=float(neuron["incentive"]),
updated=int(neuron["updated"]),
uid=int(neuron["uid"]),
block_number=int(neuron["block_number"]),
) for neuron in response.json()["data"]
}
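
# Taostats reports each neuron's last-updated block; the max of these approximates the chain tip.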
def get_latest_block():
    return max(neuron.block_number for neuron in get_neurons().values())
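
# Top-level refresh: weights at most every five minutes, identities at most once a day.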
def sync_chain():
global substrate
global last_sync
now = datetime.now(TIMEZONE)
if now - last_sync < timedelta(minutes=5):
return
last_sync = now
try:
print("Syncing chain...")
block = get_latest_block()
for hotkey, neuron in get_neurons().items():
HOTKEYS_BY_UID[neuron.uid] = hotkey
fetch_weights(block)
global last_identity_sync
if now - last_identity_sync > timedelta(days=1):
print("Syncing identities...")
last_identity_sync = now
fetch_identities(block)
except Exception:
print(f"Error occurred while syncing chain")
traceback.print_exc()
substrate = SubstrateInterface(substrate.url)
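
# Example usage (an illustrative sketch, not part of the module's public interface;
# assumes TAOSTATS_API_KEY is set and the chain endpoints are reachable):
if __name__ == "__main__":
    sync_chain()
    for miner_hotkey, commitment in fetch_commitments().items():
        print(miner_hotkey, commitment.url, commitment.revision)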