Commit f6e1dec · Parent(s): 366e63b

finish
Files changed:

  .gitignore        +3   -0
  dataset_util.py   +13  -0
  indexing_util.py  +21  -0
  leaderboard.py    +197 -0
  requirements.txt  +8   -0
  subnet_util.py    +132 -0
  utils.py          +150 -0
.gitignore
ADDED
@@ -0,0 +1,3 @@
my-env/
.env
__pycache__/
dataset_util.py
ADDED
@@ -0,0 +1,13 @@
import pandas as pd
from datasets import load_dataset


def load_data(repo="bittensor-dataset/twitter-text-dataset"):
    """Loads the given Hugging Face dataset repo."""
    dataset = load_dataset(repo)
    return dataset


def get_num_rows(dataset):
    """Returns the number of rows in the dataset's train split."""
    num_rows = dataset["train"].num_rows
    return num_rows


if __name__ == "__main__":
    dataset = load_data()
    print(get_num_rows(dataset))
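Note: a minimal usage sketch for the helpers above. It is hedged: get_num_rows() hardcodes the "train" split, so the fallback branch below (illustrative, not part of the module) guards against repos that name their splits differently.

    from dataset_util import load_data, get_num_rows

    dataset = load_data()
    if "train" in dataset:
        print(get_num_rows(dataset))
    else:
        # No "train" split: report the sizes of whatever splits exist.
        print({name: split.num_rows for name, split in dataset.items()})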
indexing_util.py
ADDED
@@ -0,0 +1,21 @@
import os

import dotenv
import redis

dotenv.load_dotenv()

# Connection settings come from the environment as strings; cast the port once.
REDIS_HOST = os.getenv("REDIS_HOST")
REDIS_PORT = int(os.getenv("REDIS_PORT", 6379))
REDIS_PASSWORD = os.getenv("REDIS_PASSWORD")

hotkey_daily_indexing = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, password=REDIS_PASSWORD, db=2)  # per-miner daily counts
hotkey_indexing = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, password=REDIS_PASSWORD, db=3)  # per-miner totals
daily_indexing = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, password=REDIS_PASSWORD, db=4)  # daily totals


def get_all(redis_db):
    """Returns every (key, value) pair stored in the given Redis database."""
    keys = redis_db.keys()
    # print(keys)
    result = []
    for key in keys:
        value = redis_db.get(key)
        result.append((key, value))
    return result


if __name__ == "__main__":
    print(get_all(hotkey_daily_indexing))
    print(get_all(hotkey_indexing))
    print(get_all(daily_indexing))
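Note: indexing_util.py reads its connection settings from a local .env file. A minimal sketch of the variables it expects — the names are taken from the os.getenv calls above, but the values shown are placeholders, not real credentials:

    # Contents of .env (placeholder values):
    #   REDIS_HOST=localhost
    #   REDIS_PORT=6379
    #   REDIS_PASSWORD=changeme

    import os
    import dotenv

    dotenv.load_dotenv()
    for var in ("REDIS_HOST", "REDIS_PORT", "REDIS_PASSWORD"):
        assert os.getenv(var) is not None, f"{var} is missing from the environment"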
leaderboard.py
ADDED
@@ -0,0 +1,197 @@
import gradio as gr
import pandas as pd
import matplotlib.pyplot as plt
from dataset_util import load_data, get_num_rows
import subnet_util
import datetime
import typing
import indexing_util
from io import BytesIO

FONT = """<link href="https://fonts.cdnfonts.com/css/intersect-c-brk" rel="stylesheet">"""
TITLE_FONT = """<link href="https://fonts.cdnfonts.com/css/promova" rel="stylesheet">"""
TITLE = """<h1 align="center" id="space-title" class="intersect">D3 Subnet Leaderboard</h1>"""
DESCRIPTION = """<marquee><h3 align="center">The D3 Subnet, standing for Decentralized Distributed Data Scraping subnet, plays a crucial role in the advancement of artificial intelligence by ensuring ample training data for all Bittensor AI networks.</h3></marquee>"""
IMAGE = """<a href="https://discord.com/channels/799672011265015819/1161764869280903240" target="_blank"><img src="https://cdn.discordapp.com/attachments/1204940599145267200/1227239850332131388/5CB42426-0E73-4D10-A66A-9E256C6A6183.png?ex=6627af2d&is=66153a2d&hm=02b9870618dd8e2e6bf62a0635f3cc8020221a0c7c2568138070f614e63a2068&" alt="D3 Subnet" style="margin: auto; width: 20%; border: 0;" /></a>"""

last_refresh = None
demo = gr.Blocks(css="""
.intersect {font-family: 'Intersect C BRK', sans-serif; font-size: 40px}
.promova {font-family: 'Promova', sans-serif; font-size: 40px}
""")

twitter_text_dataset = load_data("bittensor-dataset/twitter-text-dataset")
twitter_text_num_rows = get_num_rows(twitter_text_dataset)
twitter_image_dataset = load_data("bittensor-dataset/twitter-image-dataset")
twitter_image_num_rows = get_num_rows(twitter_image_dataset)

tao_price = subnet_util.get_tao_price()
(subtensor, metagraph) = subnet_util.get_subtensor_and_metagraph()
last_refresh = datetime.datetime.now()
miners_data = subnet_util.get_subnet_data(subtensor, metagraph)

# Daily totals: Redis keys are date strings, values are row counts.
daily_indexing_data = indexing_util.get_all(indexing_util.daily_indexing)
daily_df = pd.DataFrame(daily_indexing_data, columns=['Date', 'Value'])
daily_df['Date'] = pd.to_datetime(daily_df['Date'].str.decode('utf-8'))
daily_df['Value'] = daily_df['Value'].astype(int)

# Per-miner totals: Redis keys are hotkeys.
hotkey_indexing_data = indexing_util.get_all(indexing_util.hotkey_indexing)
hotkey_df = pd.DataFrame(hotkey_indexing_data, columns=['Hotkey', 'Value'])
hotkey_df['Hotkey'] = hotkey_df['Hotkey'].str.decode('utf-8')
hotkey_df['Value'] = hotkey_df['Value'].astype(int)

# Per-miner daily counts: Redis keys are "<hotkey> <date>" strings.
hotkey_daily_indexing_data = indexing_util.get_all(indexing_util.hotkey_daily_indexing)
hotkey_daily_df = pd.DataFrame(hotkey_daily_indexing_data, columns=['Hotkey_Date', 'Value'])
hotkey_daily_df_ = pd.DataFrame()
hotkey_daily_df_['Hotkey'] = hotkey_daily_df['Hotkey_Date'].str.decode('utf-8').str.split(' ').str[0]
hotkey_daily_df_['Date'] = hotkey_daily_df['Hotkey_Date'].str.decode('utf-8').str.split(' ').str[1]
hotkey_daily_df_['Value'] = hotkey_daily_df['Value'].astype(int)

print(hotkey_daily_df_)


def leaderboard_data(
    # show_stale: bool,
    # scores: typing.Dict[int, typing.Dict[str, typing.Optional[float | str]]],
    # competition_id: str,
):
    """Builds the rows shown in the leaderboard table."""
    value = [
        [
            c.hotkey[0:8],
            c.uid,
            c.url,
            c.block,
        ]
        for c in miners_data
        # if c.incentive and c.url[0:8] == "https://"
    ]
    return value


with demo:
    gr.HTML(FONT)
    gr.HTML(TITLE_FONT)
    gr.HTML(TITLE)
    gr.HTML(IMAGE)
    gr.HTML(DESCRIPTION)

    with gr.Tabs():
        with gr.Accordion("Dataset Stats"):
            with gr.Row():
                with gr.Column(scale=1):
                    gr.HTML(f"<h2 align='center' style='font-size: 25px'>Current Size of Text Dataset: <span style='font-size: 30px; color: green;'>{twitter_text_num_rows}</span></h2>")
                with gr.Column(scale=1):
                    gr.HTML(f"<h2 align='center' style='font-size: 25px'>Current Size of Image Dataset: <span style='font-size: 30px; color: green;'>{twitter_image_num_rows}</span></h2>")
        with gr.Accordion("Subnet Stats"):
            gr.HTML("""<h2 align='center' class="promova" style='font-size: 35px;'>Miner Stats</h2>""")

            with gr.Row():
                with gr.Column(scale=1):
                    gr.BarPlot(
                        daily_df,
                        x="Date",
                        y="Value",
                        title="Daily scraped data amount",
                        # color="Date",
                        tooltip=["Date", "Value"],
                        y_lim=[0, 1000],
                        x_title="Date",
                        y_title="Amount of data scraped",
                        height=500,
                        width=500,
                        scale=5,
                        color="Value",
                        color_legend_position="top",
                        # elem_classes="daily_scraped_data",
                    )
                with gr.Column(scale=1):
                    gr.BarPlot(
                        hotkey_df,
                        x="Hotkey",
                        y="Value",
                        title="Scraped data amount of each Miner",
                        # color="Date",
                        tooltip=["Hotkey", "Value"],
                        y_lim=[0, 1000],
                        x_title="Hotkey",
                        y_title="Amount of data scraped",
                        height=500,
                        width=500,
                        scale=5,
                        color="Value",
                        x_label_angle=-30,
                        color_legend_position="top",
                        # elem_classes="daily_scraped_data",
                    )

            gr.ScatterPlot(
                hotkey_daily_df_,
                x="Date",
                y="Value",
                title="Daily scraped data amount of each Miner",
                # color="Date",
                tooltip=["Hotkey"],
                y_lim=[0, 1000],
                x_title="Date",
                y_title="Amount of data scraped",
                height=500,
                width=1000,
                scale=5,
                color="Hotkey",
                x_label_angle=-30,
                color_legend_position="top",
                # elem_classes="daily_scraped_data",
            )
        with gr.Tab(label="Miners Data"):
            class_denominator = sum(
                miners_data[i].incentive  # TODO: emission to incentive
                for i in range(0, min(10, len(miners_data)))
                if miners_data[i].incentive
            )
            class_values = {
                f"(uid={miners_data[i].uid}, hotkey={miners_data[i].hotkey[0:8]}) - {miners_data[i].url} · ${round(miners_data[i].emission * tao_price, 2):,} (τ{round(miners_data[i].emission, 2):,})": miners_data[i].incentive / class_denominator
                for i in range(0, min(10, len(miners_data)))
                if miners_data[i].incentive
            }
            gr.Label(
                label="Top 10 Miners",
                value=class_values,
                num_top_classes=10,
            )
            # miner_table = gr.components.Dataframe(
            #     value=miners_data
            # )
            with gr.Accordion("Miner stats"):
                gr.HTML(
                    f"""<h3>{last_refresh.strftime("refreshed at %H:%M on %Y-%m-%d")}</h3>"""
                )
            # with gr.Tabs():
            #     for entry in miners_data:
            #         name = f"uid={entry.uid} : commit={entry.commit[0:8]} : url={entry.url}"
            #         with gr.Tab(name):
            #             gr.Chatbot()
            leaderboard_table = gr.components.Dataframe(
                value=leaderboard_data(),
                headers=[
                    "Hotkey",
                    "UID",
                    "Url",
                    "Block",
                ],
                datatype=[
                    "markdown",
                    "number",
                    "markdown",
                    "number",
                ],
                elem_id="leaderboard_table",
                interactive=False,
                visible=True,
            )

demo.launch()
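Note: the hotkey_daily_df_ parsing above assumes each key in Redis db 2 is a byte string of the form "<hotkey> <date>" separated by a single space. A hedged sketch of that decode step — the key below is made up for illustration, not real data:

    raw_key = b"5F3sa2TJcoU8 2024-04-10"  # illustrative key
    hotkey, date = raw_key.decode("utf-8").split(" ")
    assert (hotkey, date) == ("5F3sa2TJcoU8", "2024-04-10")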
requirements.txt
ADDED
@@ -0,0 +1,8 @@
gradio
bittensor
matplotlib
pandas
datasets
tqdm
redis
python-dotenv
subnet_util.py
ADDED
@@ -0,0 +1,132 @@
import bittensor as bt
from bittensor.extrinsics.serving import get_metadata
from utils import functools, run_in_subprocess
from tqdm import tqdm
import concurrent.futures
import datetime
import typing
import time
import requests
from dataclasses import dataclass

SUBTENSOR = "finney"
NETUID = 10
METAGRAPH_RETRIES = 10
METAGRAPH_DELAY_SECS = 30


def get_subtensor_and_metagraph() -> typing.Tuple[bt.subtensor, bt.metagraph]:
    """Connects to the subtensor and pulls the subnet metagraph, retrying on failure."""
    for i in range(0, METAGRAPH_RETRIES):
        try:
            print("Connecting to subtensor...")
            subtensor: bt.subtensor = bt.subtensor(SUBTENSOR)
            print("Pulling metagraph...")
            metagraph: bt.metagraph = subtensor.metagraph(NETUID, lite=False)
            return subtensor, metagraph
        except Exception:
            if i == METAGRAPH_RETRIES - 1:
                raise
            print(
                f"Error connecting to subtensor or pulling metagraph, retry {i + 1} of {METAGRAPH_RETRIES} in {METAGRAPH_DELAY_SECS} seconds..."
            )
            time.sleep(METAGRAPH_DELAY_SECS)
    raise RuntimeError()


def get_tao_price() -> float:
    """Fetches the current TAO/USDT price from the MEXC API, retrying on failure."""
    for i in range(0, METAGRAPH_RETRIES):
        try:
            return float(requests.get("https://api.mexc.com/api/v3/avgPrice?symbol=TAOUSDT").json()["price"])
        except Exception:
            if i == METAGRAPH_RETRIES - 1:
                raise
            time.sleep(METAGRAPH_DELAY_SECS)
    raise RuntimeError()


@dataclass
class MinerData:
    uid: int
    hotkey: str
    block: int
    url: str
    incentive: float
    emission: float

    @classmethod
    def from_compressed_str(
        cls,
        uid: int,
        hotkey: str,
        block: int,
        cs: str,
        incentive: float,
        emission: float,
    ):
        """Returns an instance of this class from a compressed string representation."""
        tokens = cs.split(" ")
        return MinerData(
            uid=uid,
            hotkey=hotkey,
            block=block,
            url=tokens[0],
            incentive=incentive,
            emission=emission,
        )


def get_subnet_data(
    subtensor: bt.subtensor, metagraph: bt.metagraph
):
    # Function to be executed in a thread
    def fetch_data(uid):
        hotkey = metagraph.hotkeys[uid]
        try:
            partial = functools.partial(
                get_metadata, subtensor, metagraph.netuid, hotkey
            )
            metadata = run_in_subprocess(partial, 30)
        except Exception:
            return None

        if not metadata:
            return None
        commitment = metadata["info"]["fields"][0]
        hex_data = commitment[list(commitment.keys())[0]][2:]
        chain_str = bytes.fromhex(hex_data).decode()
        block = metadata["block"]
        incentive = metagraph.incentive[uid].nan_to_num().item()
        emission = (
            metagraph.emission[uid].nan_to_num().item() * 20
        )  # convert to daily TAO

        try:
            model_data = MinerData.from_compressed_str(
                uid, hotkey, block, chain_str, incentive, emission
            )
        except Exception as e:
            print(f"Error parsing model data for uid {uid}: {e}")
            return None
        print(model_data)
        return model_data

    # Use ThreadPoolExecutor to fetch data in parallel
    results = []
    with concurrent.futures.ThreadPoolExecutor() as executor:
        # Prepare the list of futures
        futures = [executor.submit(fetch_data, uid) for uid in metagraph.uids.tolist()]

        for future in tqdm(
            concurrent.futures.as_completed(futures),
            desc="Metadata for hotkeys",
            total=len(futures),
        ):
            result = future.result()
            if result and result.url[0] != "{":
                results.append(result)

    return results


if __name__ == "__main__":
    (subtensor, metagraph) = get_subtensor_and_metagraph()
    data = get_subnet_data(subtensor, metagraph)
    print(data)
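Note: MinerData.from_compressed_str keeps only the first space-separated token of the on-chain commitment as the miner's URL, and get_subnet_data later drops any result whose url starts with "{". A hedged sketch of that parsing with illustrative values (the hotkey, block, and URL below are made up):

    from subnet_util import MinerData

    miner = MinerData.from_compressed_str(
        uid=0,
        hotkey="5F3sa2TJcoU8",  # illustrative hotkey
        block=2_000_000,        # illustrative block height
        cs="https://example.com/scrape extra-token",
        incentive=0.01,
        emission=1.5,
    )
    assert miner.url == "https://example.com/scrape"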
utils.py
ADDED
@@ -0,0 +1,150 @@
import functools
import multiprocessing
import os
import codecs
import re
from typing import Any, Optional, Tuple

import bittensor as bt


# def sopt_check(row):


def assert_registered(wallet: bt.wallet, metagraph: bt.metagraph) -> int:
    """Asserts the wallet is a registered miner and returns the miner's UID.

    Raises:
        ValueError: If the wallet is not registered.
    """
    if wallet.hotkey.ss58_address not in metagraph.hotkeys:
        raise ValueError(
            f"You are not registered. \nUse: \n`btcli s register --netuid {metagraph.netuid}` to register via burn, \nor `btcli s pow_register --netuid {metagraph.netuid}` to register with a proof of work"
        )
    uid = metagraph.hotkeys.index(wallet.hotkey.ss58_address)
    bt.logging.success(
        f"You are registered with address: {wallet.hotkey.ss58_address} and uid: {uid}"
    )

    return uid


# def validate_hf_repo_id(repo_id: str) -> Tuple[str, str]:
#     """Verifies a Hugging Face repo id is valid and returns it split into namespace and name.
#
#     Raises:
#         ValueError: If the repo id is invalid.
#     """
#
#     if not repo_id:
#         raise ValueError("Hugging Face repo id cannot be empty.")
#
#     if not 3 < len(repo_id) <= ModelId.MAX_REPO_ID_LENGTH:
#         raise ValueError(
#             f"Hugging Face repo id must be between 3 and {ModelId.MAX_REPO_ID_LENGTH} characters. Got={repo_id}"
#         )
#
#     parts = repo_id.split("/")
#     if len(parts) != 2:
#         raise ValueError(
#             f"Hugging Face repo id must be in the format <org or user name>/<repo_name>. Got={repo_id}"
#         )
#
#     return parts[0], parts[1]


# def get_hf_url(model_metadata: ModelMetadata) -> str:
#     """Returns the URL to the Hugging Face repo for the provided model metadata."""
#     return f"https://huggingface.co/{model_metadata.id.namespace}/{model_metadata.id.name}/tree/{model_metadata.id.commit}"


def _wrapped_func(func: functools.partial, queue: multiprocessing.Queue):
    try:
        result = func()
        queue.put(result)
    except (Exception, BaseException) as e:
        # Catch exceptions here to add them to the queue.
        queue.put(e)


def run_in_subprocess(func: functools.partial, ttl: int, mode="fork") -> Any:
    """Runs the provided function on a subprocess with 'ttl' seconds to complete.

    Args:
        func (functools.partial): Function to be run.
        ttl (int): How long to try for in seconds.

    Returns:
        Any: The value returned by 'func'.
    """
    ctx = multiprocessing.get_context(mode)
    queue = ctx.Queue()
    process = ctx.Process(target=_wrapped_func, args=[func, queue])

    process.start()

    process.join(timeout=ttl)

    if process.is_alive():
        process.terminate()
        process.join()
        raise TimeoutError(f"Failed to {func.func.__name__} after {ttl} seconds")

    # Raises an error if the queue is empty. This is fine. It means our subprocess timed out.
    result = queue.get(block=False)

    # If we put an exception on the queue then raise instead of returning.
    if isinstance(result, Exception):
        raise result
    if isinstance(result, BaseException):
        raise Exception(f"BaseException raised in subprocess: {str(result)}")

    return result


def get_version() -> str:
    """Retrieves the version string from the package's __init__.py."""
    base_directory = os.path.dirname(os.path.abspath(__file__))
    with codecs.open(os.path.join(base_directory, '../__init__.py'), encoding='utf-8') as init_file:
        version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", init_file.read(), re.M)
        version = version_match.group(1)
    return version


def upgrade_version():
    """Upgrades to the latest version if a newer one is available on origin/main."""
    local_version = get_version()
    bt.logging.info(f"You are using v{local_version}")
    try:
        os.system("git pull origin main > /dev/null 2>&1")
        remote_version = get_version()
        if local_version != remote_version:
            os.system("python3 -m pip install -e . > /dev/null 2>&1")
            bt.logging.info(f"⏫ Upgraded to v{remote_version}")
            os._exit(0)
    except Exception as e:
        bt.logging.error(f"❌ Error occurred while upgrading the version: {e}")


def save_version(filepath: str, version: int):
    """Saves a version to the provided filepath."""
    os.makedirs(os.path.dirname(filepath), exist_ok=True)
    with open(filepath, "w") as f:
        f.write(str(version))


def move_file_if_exists(src: str, dst: str) -> bool:
    """Moves a file from src to dst if it exists.

    Returns:
        bool: True if the file was moved, False otherwise.
    """
    if os.path.exists(src) and not os.path.exists(dst):
        os.makedirs(os.path.dirname(dst), exist_ok=True)
        os.replace(src, dst)
        return True
    return False
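Note: a minimal usage sketch for run_in_subprocess above. The callable and TTL are illustrative; the return value travels back through a multiprocessing queue, so it must be picklable.

    import functools
    from utils import run_in_subprocess

    # pow(2, 10) stands in for a slow call; 5 seconds is an arbitrary TTL.
    partial = functools.partial(pow, 2, 10)
    print(run_in_subprocess(partial, ttl=5))  # prints 1024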