File size: 2,785 Bytes
aad220f
 
 
 
6391563
 
2508d8e
 
6391563
2508d8e
 
aad220f
 
 
 
 
6391563
2508d8e
6391563
 
 
aad220f
 
 
 
 
2508d8e
6391563
5dfbe3b
6391563
 
 
2508d8e
 
60f885b
aad220f
 
 
 
 
 
 
 
 
 
 
 
 
 
6391563
aad220f
 
 
 
6391563
aad220f
 
 
 
6391563
 
aad220f
 
 
 
2508d8e
0872630
aad220f
2508d8e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
aad220f
6391563
 
aad220f
 
 
6391563
328256f
aad220f
 
 
 
 
 
6391563
aad220f
 
6391563
aad220f
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114

import atexit
import datetime

import pandas as pd
import uvicorn
from apscheduler.schedulers.background import BackgroundScheduler
from fastapi import FastAPI

import utils
from classes import Metagraph, Productivity, Throughput

# Global variables (saves time on loading data)
state_vars = None  # NOTE(review): appears unused in this file — confirm before removing
reload_timestamp = datetime.datetime.now().strftime('%D %T')  # e.g. '01/31/24 12:00:00'

# Cached dataframes, populated/refreshed by load_data()
data_all = None  # full dataset
data_30d = None  # rows whose 'updated_at' falls within the last 30 days
data_24h = None  # rows whose 'updated_at' falls within the last 24 hours

app = FastAPI()

def load_data():
    """
    Refresh the global dataframes and the reload timestamp.

    Fetches any new runs via ``utils.fetch_new_runs()``, reloads the full
    dataset into ``data_all``, and derives the 30-day (``data_30d``) and
    24-hour (``data_24h``) windows from the 'updated_at' column.
    """
    global data_all, data_30d, data_24h, reload_timestamp

    utils.fetch_new_runs()

    data_all = utils.preload_data()

    # Compute the row ages once; the original re-ran a per-row
    # pd.Timestamp conversion (via .apply) separately for each window.
    age = pd.Timestamp.now() - pd.to_datetime(data_all['updated_at'])

    data_30d = data_all[age < pd.Timedelta('30 days')]
    data_24h = data_all[age < pd.Timedelta('1 days')]

    reload_timestamp = datetime.datetime.now().strftime('%D %T')

    print(f'Reloaded data at {reload_timestamp}')


def start_scheduler():
    """Start a background job that re-runs load_data every 30 minutes."""
    background = BackgroundScheduler()
    background.add_job(func=load_data, trigger="interval", seconds=60 * 30)
    background.start()

    # Make sure the scheduler thread is stopped when the process exits.
    atexit.register(background.shutdown)


@app.get("/")
def home():
    """Root endpoint — returns a plain greeting string."""
    return "Welcome to the Bittensor Protein Folding Leaderboard API!"


@app.get("/updated")
def updated():
    """Report when the cached data was last reloaded."""
    return reload_timestamp


@app.get("/productivity", response_model=Productivity)
def productivity_metrics():
    """
    Get the productivity metrics
    """
    return utils.get_productivity(df_all=data_all, df_24h=data_24h, df_30d=data_30d)
@app.get("/metagraph", response_model=Metagraph)
def get_metagraph():
    """
    Get the metagraph
    """
    # Order miners by incentive, highest first, with a fresh index.
    miners = utils.get_metagraph().sort_values('I', ascending=False).reset_index()

    return {
        'incentives': miners['I'].astype(float).values,
        'emissions': miners['E'].astype(float).values,
        'identities': miners['identity'],
        'hotkeys': miners['hotkey'],
        'coldkeys': miners['coldkey'],
        'trusts': miners['trust'].astype(float).values,
    }

@app.get("/throughput", response_model=Throughput)
def throughput_metrics():
    """
    Get the throughput metrics
    """
    throughput = utils.get_data_transferred(data_all, data_24h)
    return throughput



if __name__ == '__main__':
    # Prime the caches before serving, then keep them fresh in the background.
    load_data()
    start_scheduler()

    # to test locally:
    # curl -X GET http://0.0.0.0:5001/data
    uvicorn.run(app, host='0.0.0.0', port=5001)