Spaces:
Paused
Paused
Cleanup functions and add subtensor-based block-time converter
Browse files- meta2frame.py +27 -92
meta2frame.py
CHANGED
@@ -9,31 +9,8 @@ import bittensor as bt
|
|
9 |
import pandas as pd
|
10 |
import plotly.express as px
|
11 |
|
12 |
-
def trust(W, S, threshold=0):
    """Trust vector for subnets with variable threshold.

    Args:
        W: weight matrix, presumably (stakeholders x subnets) — confirm with caller.
        S: stake vector, one entry per row of ``W``.
        threshold: minimum weight for a stakeholder's vote to count.

    Returns:
        Per-subnet trust: total stake of stakeholders whose weight on the
        subnet exceeds ``threshold``.
    """
    # Bug fix: the original hard-coded (W > 0), silently ignoring the
    # `threshold` parameter (the intent is visible in the old commented-out
    # line `(W > threshold)*S...`). Default threshold=0 preserves behavior.
    Wn = (W > threshold).float()
    return Wn.T @ S
|
19 |
-
|
20 |
-
def rank(W, S):
    """Rank vector for subnets: stake-weighted column sums of ``W``,
    normalized so the result sums to 1."""
    raw = W.T @ S
    total = raw.sum()
    return raw / total
|
26 |
-
|
27 |
-
def emission(C, R):
    """Emission vector for subnets: consensus-scaled rank, renormalized to sum to 1."""
    scaled = C * R
    return scaled / scaled.sum()
|
32 |
-
|
33 |
-
def YC1(T, a=0.5, b=10):
    """Yuma Consensus 1: sigmoid of trust ``T``, centered at ``a`` with steepness ``b``."""
    shifted = b * (T - a)
    return torch.sigmoid(shifted)
|
37 |
|
38 |
|
39 |
def load_metagraphs(root_dir, netuid, block_min=0, block_max=3_000_000):
|
@@ -42,10 +19,11 @@ def load_metagraphs(root_dir, netuid, block_min=0, block_max=3_000_000):
|
|
42 |
match_path = os.path.join(root_dir, str(netuid), '*.pkl')
|
43 |
files = glob.glob(match_path)
|
44 |
print(f'Found {len(files)} metagraphs in {match_path}')
|
45 |
-
|
46 |
-
|
47 |
-
|
48 |
-
|
|
|
49 |
with open(path, 'rb') as f:
|
50 |
metagraph = pickle.load(f)
|
51 |
metagraphs.append(metagraph)
|
@@ -53,75 +31,30 @@ def load_metagraphs(root_dir, netuid, block_min=0, block_max=3_000_000):
|
|
53 |
return sorted(metagraphs, key=lambda x: x.block)
|
54 |
|
55 |
|
56 |
-
|
57 |
-
def plot_emission_trend(metagraphs, netuid, max_uids=32):
|
58 |
|
59 |
-
|
60 |
-
|
|
|
61 |
|
62 |
-
for metagraph in metagraphs:
|
63 |
-
E = m.W.mean(axis=0)
|
64 |
-
df = pd.concat([df, pd.DataFrame({'emission':E}).assign(block=metagraph.block)])
|
65 |
|
66 |
-
|
67 |
|
68 |
-
|
69 |
-
|
70 |
|
71 |
-
|
72 |
-
|
73 |
-
fig.update_yaxes(title_text='Emission')
|
74 |
-
fig.show()
|
75 |
|
76 |
-
|
|
|
|
|
|
|
77 |
|
78 |
-
|
79 |
-
if not isinstance(blocks, pd.Series):
|
80 |
-
blocks = pd.Series(blocks)
|
81 |
-
|
82 |
-
block_time_500k = datetime.datetime(2023, 5, 29, 5, 29, 0)
|
83 |
-
block_time_800k = datetime.datetime(2023, 7, 9, 21, 32, 48)
|
84 |
-
dt = (pd.Timestamp(block_time_800k)-pd.Timestamp(block_time_500k))/(800_000-500_000)
|
85 |
-
return block_time_500k + dt*(blocks-500_000)
|
86 |
-
|
87 |
-
root_dir = os.path.expanduser('~/Desktop/py/opentensor/metagraph/subnets/')
|
88 |
-
|
89 |
-
metagraphs = load_metagraphs(root_dir, 0)
|
90 |
-
metagraphs
|
91 |
-
|
92 |
-
def make_dataframe_old(metagraphs, netuid):
|
93 |
-
df = pd.DataFrame()
|
94 |
-
# max_uids=max(m.W.shape[1] for m in metagraphs)
|
95 |
-
for metagraph in sorted(metagraphs, key=lambda m: m.block):
|
96 |
-
if metagraph.n.item() == 0:
|
97 |
-
print(f'Block {metagraph.block} has no nodes, skipping')
|
98 |
-
continue
|
99 |
-
|
100 |
-
if netuid == 0:
|
101 |
-
W = metagraph.W.float()
|
102 |
-
Sn = (metagraph.S/metagraph.S.sum()).clone().float()
|
103 |
-
|
104 |
-
T = trust(W, Sn)
|
105 |
-
R = rank(W, Sn)
|
106 |
-
C = YC1(T)
|
107 |
-
E = emission(C, R)
|
108 |
-
else:
|
109 |
-
T = metagraph.T
|
110 |
-
R = metagraph.R
|
111 |
-
C = metagraph.C
|
112 |
-
E = metagraph.E
|
113 |
-
|
114 |
-
frame = pd.DataFrame({'Trust':T, 'Rank':R, 'Consensus':C, 'Emission':E, 'uid':range(len(E))}).assign(block=metagraph.block.item(), netuid=netuid)
|
115 |
-
df = pd.concat([df, frame])
|
116 |
-
|
117 |
-
df['alive'] = df.groupby('netuid')['Emission'].transform(lambda x: x > 0)
|
118 |
-
df['owner_take'] = df['Emission'] * 7200 * 0.18
|
119 |
-
df['timestamp'] = block_to_time(df['block'])
|
120 |
-
df['day'] = df['timestamp'].dt.dayofyear
|
121 |
-
df.sort_values(by=['block','netuid'], inplace=True)
|
122 |
-
return df
|
123 |
|
124 |
-
|
|
|
125 |
if cols is None:
|
126 |
cols = ['stake','emission','trust','validator_trust','dividends','incentive','R', 'consensus','validator_permit']
|
127 |
frames = []
|
@@ -130,7 +63,6 @@ def make_dataframe(root_dir, netuid, cols=None, block_min=0, block_max=3_000_000
|
|
130 |
for m in metagraphs:
|
131 |
frame = pd.DataFrame({k: getattr(m, k) for k in cols})
|
132 |
frame['block'] = m.block.item()
|
133 |
-
frame['timestamp'] = block_to_time(frame['block'])
|
134 |
frame['netuid'] = netuid
|
135 |
frame['uid'] = range(len(frame))
|
136 |
frame['hotkey'] = [axon.hotkey for axon in m.axons]
|
@@ -138,6 +70,9 @@ def make_dataframe(root_dir, netuid, cols=None, block_min=0, block_max=3_000_000
|
|
138 |
if weights and m.W is not None:
|
139 |
# convert NxN tensor to a list of lists so it fits into the dataframe
|
140 |
frame['weights'] = [w.tolist() for w in m.W]
|
141 |
-
|
142 |
frames.append(frame)
|
143 |
-
|
|
|
|
|
|
|
|
9 |
import pandas as pd
|
10 |
import plotly.express as px
|
11 |
|
|
|
|
|
12 |
|
13 |
+
ROOT_DIR = './data/metagraph/'
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
14 |
|
15 |
|
16 |
def load_metagraphs(root_dir, netuid, block_min=0, block_max=3_000_000):
|
|
|
19 |
match_path = os.path.join(root_dir, str(netuid), '*.pkl')
|
20 |
files = glob.glob(match_path)
|
21 |
print(f'Found {len(files)} metagraphs in {match_path}')
|
22 |
+
|
23 |
+
valid_files = [path for path in files if block_min <= int(path.split('/')[-1].split('.')[0]) <= block_max]
|
24 |
+
print(f'Found {len(valid_files)} valid metagraphs between {block_min} and {block_max}')
|
25 |
+
for path in tqdm.tqdm(valid_files):
|
26 |
+
|
27 |
with open(path, 'rb') as f:
|
28 |
metagraph = pickle.load(f)
|
29 |
metagraphs.append(metagraph)
|
|
|
31 |
return sorted(metagraphs, key=lambda x: x.block)
|
32 |
|
33 |
|
34 |
+
def get_block_timestamp(block, subtensor):
    """Return the timestamp recorded in chain block ``block``.

    Reads the first extrinsic of the block — assumes this is the substrate
    timestamp extrinsic whose sole call argument is the block time in
    milliseconds (TODO confirm against the chain runtime).
    """
    block_info = subtensor.substrate.get_block(block_number=int(block))
    timestamp_call = block_info['extrinsics'][0]['call']
    call_args = timestamp_call.value_serialized['call_args']
    return call_args[0]['value']
|
39 |
|
|
|
|
|
|
|
40 |
|
41 |
+
def block_to_time(blocks, subtensor=None):
    """Convert block numbers to datetimes by querying the chain.

    Args:
        blocks: block numbers (pd.Series or any iterable accepted by pd.Series).
        subtensor: optional subtensor client; defaults to a new connection to
            the 'archive' network (presumably needed for historical blocks —
            confirm).

    Returns:
        pd.Series of datetimes aligned with ``blocks``.
    """
    if not isinstance(blocks, pd.Series):
        blocks = pd.Series(blocks)

    if subtensor is None:
        subtensor = bt.subtensor(network='archive')

    # Query each distinct block only once — chain lookups are the slow part.
    timestamps = {}
    unique_blocks = set(blocks)
    for block in tqdm.tqdm(unique_blocks):
        timestamps[block] = get_block_timestamp(block, subtensor)

    # Vectorized conversion (chain timestamps are in ms since epoch) instead of
    # the per-element `.apply(pd.to_datetime, unit='ms')`: one C-level pass and
    # a proper datetime64 dtype rather than an object Series.
    return pd.to_datetime(blocks.map(timestamps), unit='ms')
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
55 |
|
56 |
+
|
57 |
+
def make_dataframe(netuid, root_dir=ROOT_DIR, cols=None, block_min=0, block_max=3_000_000, weights=False):
|
58 |
if cols is None:
|
59 |
cols = ['stake','emission','trust','validator_trust','dividends','incentive','R', 'consensus','validator_permit']
|
60 |
frames = []
|
|
|
63 |
for m in metagraphs:
|
64 |
frame = pd.DataFrame({k: getattr(m, k) for k in cols})
|
65 |
frame['block'] = m.block.item()
|
|
|
66 |
frame['netuid'] = netuid
|
67 |
frame['uid'] = range(len(frame))
|
68 |
frame['hotkey'] = [axon.hotkey for axon in m.axons]
|
|
|
70 |
if weights and m.W is not None:
|
71 |
# convert NxN tensor to a list of lists so it fits into the dataframe
|
72 |
frame['weights'] = [w.tolist() for w in m.W]
|
73 |
+
|
74 |
frames.append(frame)
|
75 |
+
|
76 |
+
df = pd.concat(frames)
|
77 |
+
df['timestamp'] = block_to_time(df['block'])
|
78 |
+
return df.sort_values(by=['timestamp','block','uid'])
|