Sarkosos committed
Commit 5a605f5 · 1 Parent(s): 9da9693

using historical data from otf wandb and current data
utils.py
CHANGED
@@ -15,7 +15,7 @@ import bittensor as bt
 MIN_STEPS = 12 # minimum number of steps in wandb run in order to be worth analyzing
 MAX_RUNS = 100#0000
 NETUID = 25
-
+BASE_PATHS = ['macrocosmos/folding-validators--moved', 'macrocosmos/folding-validators'] # added historical data from otf wandb and current data
 NETWORK = 'finney'
 KEYS = None
 ABBREV_CHARS = 8

@@ -25,7 +25,7 @@ PDBS_PER_RUN_STEP = 0.083
 AVG_MD_STEPS = 30_000
 BASE_UNITS = 'MB'

-api = wandb.Api(timeout=120, api_key=
+api = wandb.Api(timeout=120, api_key='cdcbe340bb7937d3a289d39632491d12b39231b7')

 IDENTITIES = {
     '5F4tQyWrhfGVcNhoqeiNsR6KjD4wMZ2kfhLj4oHYuyHbZAc3': 'opentensor',

@@ -201,7 +201,7 @@ def load_run(run_path, keys=KEYS):
     return df

 @st.cache_data(show_spinner=False)
-def build_data(timestamp=None, path=BASE_PATH, min_steps=MIN_STEPS, use_cache=True):
+def build_data(timestamp=None, paths=BASE_PATHS, min_steps=MIN_STEPS, use_cache=True):

     save_path = '_saved_runs.csv'
     filters = {}

@@ -216,20 +216,24 @@ def build_data(timestamp=None, path=BASE_PATH, min_steps=MIN_STEPS, use_cache=Tr
     filters.update({'updated_at': {'$gte': timestamp_str}})

     progress = st.progress(0, text='Loading data')
-
-
+    historical_runs = api.runs(paths[0], filters=filters)
+    historical_and_current_runs = [historical_runs, api.runs(paths[1], filters=filters)]
+
     run_data = []
     n_events = 0
-
-
-
-
-
-
-
-
-
+    total_runs = len(historical_and_current_runs[0])+len(historical_and_current_runs[1])
+    for runs in historical_and_current_runs:
+        for i, run in enumerate(tqdm.tqdm(runs, total=total_runs)):
+            num_steps = run.summary.get('_step',0)
+            if num_steps<min_steps:
+                continue
+            n_events += num_steps
+            prog_msg = f'Loading data {i/total_runs*100:.0f}%, {n_events:,.0f} events)'
+            progress.progress(i/total_runs,text=f'{prog_msg}... **downloading** `{os.path.join(*run.path)}`')
+
+            run_data.append(run)

     progress.empty()
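For reference, a minimal standalone sketch of the run-loading flow this commit introduces: pull runs from both the historical and the current wandb project, skip runs with too few steps, and report progress while iterating. The load_runs_from_projects helper name and the WANDB_API_KEY environment variable are illustrative choices only (the commit hardcodes the key in utils.py); everything else mirrors the diff above.

    import os

    import tqdm
    import wandb
    import streamlit as st

    # Same two projects as BASE_PATHS in the commit: historical data first, then current data.
    BASE_PATHS = ['macrocosmos/folding-validators--moved', 'macrocosmos/folding-validators']
    MIN_STEPS = 12

    # Illustrative: read the key from the environment instead of hardcoding it in utils.py.
    api = wandb.Api(timeout=120, api_key=os.environ.get('WANDB_API_KEY'))

    def load_runs_from_projects(paths=BASE_PATHS, filters=None, min_steps=MIN_STEPS):
        """Collect runs from every project in `paths`, skipping runs with fewer than `min_steps` steps."""
        run_lists = [api.runs(path, filters=filters) for path in paths]
        total_runs = sum(len(runs) for runs in run_lists)
        if total_runs == 0:
            return []

        progress = st.progress(0, text='Loading data')
        run_data, n_events, seen = [], 0, 0

        for runs in run_lists:
            for run in tqdm.tqdm(runs):
                seen += 1  # global counter, so the bar keeps advancing across projects
                num_steps = run.summary.get('_step', 0)
                if num_steps < min_steps:
                    continue
                n_events += num_steps
                msg = f'Loading data {seen / total_runs * 100:.0f}%, {n_events:,.0f} events'
                progress.progress(seen / total_runs, text=f'{msg}... downloading `{os.path.join(*run.path)}`')
                run_data.append(run)

        progress.empty()
        return run_data

The one behavioural difference from the committed loop is the seen counter: in the diff, enumerate restarts at zero for the second project, so i/total_runs never reaches 1 and the progress bar jumps backwards when the current-data project starts.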