AlexNijjar committed
Commit 5fb35aa · unverified · 1 parent: ad230c0

Update gradio, fix not showing all submissions

Files changed (5)
  1. README.md +1 -1
  2. requirements.txt +2 -2
  3. src/chain_data.py +32 -28
  4. src/submissions.py +4 -4
  5. src/wandb_data.py +2 -2
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: 🏆
 colorFrom: purple
 colorTo: gray
 sdk: gradio
-sdk_version: 5.11.0
+sdk_version: 5.13.1
 app_file: src/app.py
 pinned: true
 ---
requirements.txt CHANGED
@@ -1,9 +1,9 @@
 fiber @ git+https://github.com/rayonlabs/[email protected]#egg=fiber[chain]
-gradio==5.11.0
+gradio==5.13.1
 wandb==0.19.2
 substrate-interface==1.7.10
 plotly==5.24.1
 pandas==2.2.3
 packaging==24.2
 netaddr==1.3.0
-cachetools==5.5.0
+cachetools==5.5.1
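Note: the `gradio==5.13.1` pin here matches the `sdk_version: 5.13.1` bump in the README front matter above. On Hugging Face Spaces the front-matter `sdk_version` is what selects the Gradio runtime, so keeping the two in sync presumably avoids running a different Gradio locally than on the Space.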
src/chain_data.py CHANGED
@@ -1,8 +1,8 @@
 import os
 import traceback
-from dataclasses import dataclass
 from datetime import datetime, timedelta
 from enum import Enum
+from threading import Lock
 from typing import TypeAlias
 
 import requests
@@ -10,6 +10,7 @@ from cachetools import TTLCache, cached
 from fiber import constants
 from fiber.chain.commitments import _deserialize_commitment_field
 from fiber.chain.interface import get_substrate
+from fiber.chain.metagraph import Metagraph
 from pydantic import BaseModel
 from substrateinterface import SubstrateInterface
 from substrateinterface.storage import StorageKey
@@ -18,7 +19,6 @@ from network_commitments import Decoder
 from src import Key, Uid, TIMEZONE
 
 TAOSTATS_API_KEY = os.getenv("TAOSTATS_API_KEY")
-DISABLE_COMMITMENTS_FETCH = int(os.getenv("DISABLE_COMMITMENTS_FETCH") or 0) > 0
 
 Weight: TypeAlias = float
 Incentive: TypeAlias = float
@@ -40,10 +40,8 @@ class Neuron(BaseModel):
     block_number: int
 
 
-@dataclass
-class Commitment:
-    provider: str
-    repository: str
+class Commitment(BaseModel):
+    url: str
     revision: str
     contest: ContestId
     block: int
@@ -56,22 +54,17 @@ class Commitment:
         contest_id = ContestId(decoder.read_uint16())
 
         return cls(
-            provider=provider,
-            repository=repository,
+            url=f"https://{provider}/{repository}",
             revision=revision,
            contest=contest_id,
             block=block
         )
 
-    def get_repo_link(self):
-        return f"https://{self.provider}/{self.repository}"
-
 
 SPEC_VERSION = 8
 NET_UID = 39
 WEIGHTS_BY_MINER: dict[Key, list[tuple[Key, Weight]]] = {}
 VALIDATOR_IDENTITIES: dict[Key, str] = {}
-COMMITMENTS: dict[Key, Commitment] = {}
 
 HOTKEYS_BY_UID: dict[Uid, Key] = {}
 
@@ -136,22 +129,36 @@ def fetch_identities(block: int):
             break
 
 
-def fetch_commitments(block: int):
-    if DISABLE_COMMITMENTS_FETCH:
-        return
+lock = Lock()
+@cached(cache=TTLCache(maxsize=1, ttl=12 * 60 * 60), lock=lock)
+def fetch_commitments() -> dict[Key, Commitment]:
+    substrate = get_substrate(subtensor_address=constants.FINNEY_SUBTENSOR_ADDRESS)
+    block = substrate.get_block_number(None)  # type: ignore
+    metagraph = Metagraph(substrate=substrate, netuid=NET_UID, load_old_nodes=False)
+    metagraph.sync_nodes()
+    nodes = {
+        key: node for key, node
+        in metagraph.nodes.items()
+    }
 
-    COMMITMENTS.clear()
+    print("Fetching commitments...")
+    commitments: dict[Key, Commitment] = {}
     storage_keys: list[StorageKey] = []
-    for hotkey, neuron in get_neurons().items():
-        if neuron.validator_permit: continue
+    for hotkey, node in nodes.items():
+        if node.vtrust > 0:
+            continue
         storage_keys.append(substrate.create_storage_key(
             "Commitments",
             "CommitmentOf",
             [NET_UID, hotkey]
         ))
 
-    commitments = query_subtensor(storage_keys, block)
-    for storage, commitment in commitments:
+    results = substrate.query_multi(
+        storage_keys=storage_keys,
+        block_hash=substrate.get_block_hash(block),
+    )
+
+    for storage, commitment in results:
         try:
             if not commitment or not commitment.value:
                 continue
@@ -169,14 +176,15 @@ def fetch_commitments(block: int):
             if spec_version != SPEC_VERSION:
                 continue
 
-            COMMITMENTS[storage.params[1]] = Commitment.decode(decoder, int(commitment.value["block"]))
+            commitments[storage.params[1]] = Commitment.decode(decoder, int(commitment.value["block"]))
         except:
-            continue
+            traceback.print_exc()
+
+    return commitments
 
 
 last_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)
 last_identity_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)
-last_commitment_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)
 
 
 @cached(cache=TTLCache(maxsize=1, ttl=120))
@@ -207,6 +215,7 @@ def get_neurons() -> dict[Key, Neuron]:
         ) for neuron in response.json()["data"]
     }
 
+
 def get_latest_block():
     return max([neuron.block_number for neuron in get_neurons().values()])
 
@@ -235,11 +244,6 @@ def sync_chain():
             last_identity_sync = now
             fetch_identities(block)
 
-        global last_commitment_sync
-        if now - last_commitment_sync > timedelta(hours=12):
-            print("Syncing commitments...")
-            last_commitment_sync = now
-            fetch_commitments(block)
     except Exception:
         print(f"Error occurred while syncing chain")
         traceback.print_exc()
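Reviewer note: the core of this change replaces the hand-rolled `last_commitment_sync` timestamp bookkeeping in `sync_chain` with a `cachetools` TTL cache on `fetch_commitments` itself. A minimal sketch of the same pattern, with a toy `fetch_data` standing in for the real chain query (names illustrative, not from this repo):

from threading import Lock

from cachetools import TTLCache, cached

lock = Lock()

# maxsize=1: the function takes no arguments, so one cache slot suffices.
# ttl=12 * 60 * 60: the cached value expires after 12 hours; the next call
# after expiry recomputes it. lock= makes the cache's internal reads and
# writes thread-safe across concurrent Gradio request threads (note it does
# not serialize the fetch itself: two simultaneous misses can both run it).
@cached(cache=TTLCache(maxsize=1, ttl=12 * 60 * 60), lock=lock)
def fetch_data() -> dict[str, int]:
    print("expensive fetch...")  # runs at most roughly once per 12 hours
    return {"example": 1}

fetch_data()  # performs the fetch
fetch_data()  # served from the cache until the TTL expires

Compared to the old approach, expiry and thread-safety now live next to the function instead of being threaded through `sync_chain`.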
src/submissions.py CHANGED
@@ -3,7 +3,7 @@ from enum import Enum
 import gradio as gr
 import pandas as pd
 
-from chain_data import sync_chain, COMMITMENTS
+from chain_data import sync_chain, fetch_commitments
 from src import Key
 from src.chain_data import get_neurons
 from wandb_data import get_current_runs, Run, get_blacklisted_keys
@@ -48,7 +48,7 @@ def create_submissions(submission_filters: list[str]) -> gr.Dataframe:
     sync_chain()
     runs = sorted(get_current_runs(), key=lambda run: run.uid)
 
-    for hotkey, commitment in COMMITMENTS.items():
+    for hotkey, commitment in fetch_commitments().items():
         neuron = get_neurons().get(hotkey)
         if not neuron:
             continue
@@ -56,9 +56,9 @@ def create_submissions(submission_filters: list[str]) -> gr.Dataframe:
 
         row = [
             neuron.uid,
-            f"[{'/'.join(commitment.get_repo_link().split('/')[-2:])}]({commitment.get_repo_link()})",
+            f"[{'/'.join(commitment.url.split('/')[-2:])}]({commitment.url})",
             f"[{commitment.block}](https://taostats.io/block/{commitment.block}/extrinsics)",
-            f"[{commitment.revision}]({commitment.get_repo_link()}/commit/{commitment.revision})",
+            f"[{commitment.revision}]({commitment.url}/commit/{commitment.revision})",
             f"[{hotkey[:6]}...](https://taostats.io/hotkey/{hotkey})",
             f"[{coldkey[:6]}...](https://taostats.io/coldkey/{coldkey})",
             commitment.contest.name,
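Reviewer note: this is the actual "not showing all submissions" fix. The table was previously built from the module-level `COMMITMENTS` dict, which was only filled by a periodic sync inside `sync_chain`, so a render could observe it empty or mid-refresh (it was `clear()`ed before being repopulated). Calling the TTL-cached `fetch_commitments()` at render time always yields a complete snapshot. A minimal sketch of the difference (toy names, not from this repo):

from cachetools import TTLCache, cached

# Before: a global filled by a background sync. Readers see whatever is
# there at that instant, including nothing at all before the first sync.
SUBMISSIONS: dict[str, str] = {}

def render_before() -> list[tuple[str, str]]:
    return list(SUBMISSIONS.items())  # may be empty or partial

# After: readers always get the full result of the last successful fetch;
# the TTL cache keeps the expensive call from running on every render.
@cached(cache=TTLCache(maxsize=1, ttl=60))
def fetch_submissions() -> dict[str, str]:
    return {"miner-1": "repo-a", "miner-2": "repo-b"}  # placeholder data

def render_after() -> list[tuple[str, str]]:
    return list(fetch_submissions().items())  # always a complete snapshot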
src/wandb_data.py CHANGED
@@ -7,7 +7,7 @@ import requests
 import wandb
 from cachetools import TTLCache, cached
 import wandb.apis.public as wapi
-from pydantic import BaseModel, RootModel
+from pydantic import BaseModel
 from substrateinterface import Keypair
 
 from chain_data import VALIDATOR_IDENTITIES, sync_chain
@@ -16,7 +16,7 @@ from src.chain_data import get_neurons
 
 WANDB_RUN_PATH = os.environ["WANDB_RUN_PATH"]
 
-START_DATE = datetime(2024, 11, 29)
+START_DATE = datetime(2025, 1, 28)
 OFFSET_DAYS = 0
 
 BLACKLIST_ENDPOINT = "https://edge-inputs.api.wombo.ai/blacklist"