File size: 4,379 Bytes
ab35f6e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
import glob
import tqdm
import pickle
import os
import datetime

import torch
import bittensor as bt
import pandas as pd
import plotly.express as px

def trust(W, S, threshold=0):
    """Trust vector for subnets with variable threshold.

    Args:
        W: weight matrix (validators on rows, subnets/uids on columns);
           assumed a torch tensor — TODO confirm dtype/device expectations.
        S: stake vector aligned with W's rows. The commented-out assert
           below suggests it is expected to sum to 1 — not enforced here.
        threshold: minimum weight for a validator's stake to count as
            "trusting" a column. BUG FIX: this parameter was previously
            ignored (the comparison hard-coded 0); it is now honoured.
            The default of 0 preserves the old behavior exactly.

    Returns:
        Per-column total stake of validators whose weight exceeds threshold.
    """
    # assert (S.sum()-1).abs()<1e-4, f'Stake must sum to 1, got {S.sum()}'
    # Binary "supports" matrix: 1.0 where the weight clears the threshold.
    Wn = (W > threshold).float()
    return Wn.T @ S

def rank(W, S):
    """Stake-weighted rank of each subnet, normalized to a distribution.

    Computes W.T @ S (per-column stake-weighted sum) and divides by the
    total so the result sums to one.
    """
    # assert (S.sum()-1).abs()<1e-4, f'Stake must sum to 1, got {S.sum()}'
    raw = W.T @ S
    total = raw.sum()
    return raw / total

def emission(C, R):
    """Consensus-weighted rank, renormalized into an emission distribution.

    Element-wise product of consensus C and rank R, scaled so the output
    sums to one.
    """
    weighted = C * R
    return weighted / weighted.sum()

def YC1(T, a=0.5, b=10):
    """Yuma Consensus 1: sigmoid squash of trust.

    Maps trust T through a logistic curve centred at `a` with steepness
    `b`, so values well above `a` approach 1 and well below approach 0.
    """
    shifted = T - a
    return torch.sigmoid(shifted * b)


def load_metagraphs(root_dir, netuid):
    """Unpickle every saved metagraph snapshot under root_dir/<netuid>/.

    Globs for '*.pkl' files in the subnet's directory and loads each one,
    showing a tqdm progress bar.

    NOTE(review): pickle.load on arbitrary files is unsafe for untrusted
    data — presumably these snapshots are locally produced; confirm.
    """
    match_path = os.path.join(root_dir, str(netuid), '*.pkl')
    files = glob.glob(match_path)
    print(f'Found {len(files)} metagraphs in {match_path}')

    def _read(path):
        # One pickled metagraph object per snapshot file.
        with open(path, 'rb') as fh:
            return pickle.load(fh)

    return [_read(path) for path in tqdm.tqdm(files)]


# TODO: can calculate the emission trend using each subnet or just using root subnet
def plot_emission_trend(metagraphs, netuid, max_uids=32):
    """Plot the mean incoming weight ("emission") per uid across blocks.

    Args:
        metagraphs: iterable of metagraph snapshots exposing .W (weight
            matrix) and .block.
        netuid: subnet id (not used inside this function; kept for the
            caller's interface).
        max_uids: cap on the number of uids plotted; when falsy, falls
            back to the widest weight matrix seen.

    Returns:
        The plotly figure (also displayed via fig.show()).
    """
    max_uids = max_uids or max(m.W.shape[1] for m in metagraphs)

    frames = []
    for metagraph in metagraphs:
        # BUG FIX: previously read `m.W` — `m` is undefined in this scope
        # (the loop variable is `metagraph`), which raised NameError.
        # Also apply max_uids, which was computed but never used.
        E = metagraph.W.mean(axis=0)[:max_uids]
        frames.append(
            pd.DataFrame({'emission': E, 'uid': range(len(E))})
            .assign(block=metagraph.block)
        )

    df = pd.concat(frames).sort_values(by='block')

    # BUG FIX: line_group='' referenced a non-existent column (plotly
    # raises on unknown column names); draw one line per uid instead.
    fig = px.line(df, x='block', y='emission', line_group='uid',
                  title='Emission Trend',
                  width=800, height=600, template='plotly_white')
    fig.update_xaxes(title_text='Block Height')
    fig.update_yaxes(title_text='Emission')
    fig.show()

    return fig

def block_to_time(blocks):
    """Estimate wall-clock time for block heights by linear interpolation.

    Anchored on two observed (block, timestamp) pairs — blocks 500,000 and
    800,000 — and assumes a constant block interval between/beyond them.

    Args:
        blocks: block height(s); anything not already a pd.Series is
            wrapped in one.

    Returns:
        pd.Series of timestamps aligned with the input.
    """
    if not isinstance(blocks, pd.Series):
        blocks = pd.Series(blocks)

    anchor_lo = datetime.datetime(2023, 5, 29, 5, 29, 0)    # block 500,000
    anchor_hi = datetime.datetime(2023, 7, 9, 21, 32, 48)   # block 800,000
    per_block = (pd.Timestamp(anchor_hi) - pd.Timestamp(anchor_lo)) / (800_000 - 500_000)
    return anchor_lo + per_block * (blocks - 500_000)

# Root directory containing per-netuid subdirectories of pickled metagraph
# snapshots. NOTE(review): hard-coded user-specific path — adjust per machine.
root_dir = os.path.expanduser('~/Desktop/py/opentensor/metagraph/subnets/')

# Load every snapshot for the root subnet (netuid 0).
metagraphs = load_metagraphs(root_dir, 0)
# NOTE(review): bare expression below is a no-op in a script — presumably a
# leftover notebook-cell echo.
metagraphs

def make_dataframe_old(metagraphs, netuid):
    """Build a long-format dataframe of per-uid consensus metrics per block.

    For the root subnet (netuid 0) Trust/Rank/Consensus/Emission are
    recomputed from W and normalized stake; for other subnets the values
    cached on each metagraph are used directly. Snapshots with zero nodes
    are skipped with a message.

    Returns:
        DataFrame with one row per (block, uid), sorted by block/netuid,
        with derived 'alive', 'owner_take', 'timestamp' and 'day' columns.
    """
    frames = []
    # max_uids=max(m.W.shape[1] for m in metagraphs)
    for metagraph in sorted(metagraphs, key=lambda m: m.block):
        if metagraph.n.item() == 0:
            print(f'Block {metagraph.block} has no nodes, skipping')
            continue

        if netuid == 0:
            # Recompute consensus quantities from weights + normalized stake.
            W = metagraph.W.float()
            Sn = (metagraph.S/metagraph.S.sum()).clone().float()
            T = trust(W, Sn)
            R = rank(W, Sn)
            C = YC1(T)
            E = emission(C, R)
        else:
            # Use the precomputed values stored on the snapshot.
            T, R, C, E = metagraph.T, metagraph.R, metagraph.C, metagraph.E

        frames.append(
            pd.DataFrame({'Trust': T, 'Rank': R, 'Consensus': C,
                          'Emission': E, 'uid': range(len(E))})
            .assign(block=metagraph.block.item(), netuid=netuid)
        )

    df = pd.concat(frames) if frames else pd.DataFrame()
    df['alive'] = df.groupby('netuid')['Emission'].transform(lambda x: x > 0)
    # 7200 blocks/day; 18% owner take — TODO confirm these constants.
    df['owner_take'] = df['Emission'] * 7200 * 0.18
    df['timestamp'] = block_to_time(df['block'])
    df['day'] = df['timestamp'].dt.dayofyear
    df.sort_values(by=['block', 'netuid'], inplace=True)
    return df

def make_dataframe(root_dir, netuid, cols=None):
    """Load all snapshots for `netuid` and flatten them into one dataframe.

    Each row is one uid at one block; columns are the requested metagraph
    attributes plus block/time/identity metadata.

    Args:
        root_dir: directory containing per-netuid snapshot folders.
        netuid: subnet id whose snapshots to load.
        cols: metagraph attribute names to extract; a default metric set
            is used when None.

    Returns:
        Concatenated DataFrame sorted by timestamp, block, uid.
    """
    if cols is None:
        cols = ['stake','emission','trust','validator_trust','dividends','incentive','R', 'consensus','validator_permit']

    metagraphs = load_metagraphs(root_dir, netuid)
    print(f'Loaded {len(metagraphs)} metagraphs for netuid {netuid}')

    frames = []
    for m in metagraphs:
        snapshot = (
            pd.DataFrame({col: getattr(m, col) for col in cols})
            .assign(
                block=m.block.item(),
                timestamp=lambda d: block_to_time(d['block']),
                netuid=netuid,
                uid=lambda d: range(len(d)),
                hotkey=[axon.hotkey for axon in m.axons],
                coldkey=[axon.coldkey for axon in m.axons],
            )
        )
        frames.append(snapshot)

    return pd.concat(frames).sort_values(by=['timestamp','block','uid'])