import os
import sys
import argparse
import pickle
from traceback import print_exc
from concurrent.futures import ProcessPoolExecutor

import tqdm
import torch
import bittensor

#TODO: make line charts and other cool stuff for each metagraph snapshot

def process(block, netuid=1, lite=True, difficulty=False, prune_weights=False, return_graph=False, half=True, subtensor=None):
    """Sync the metagraph at `block` for `netuid` and pickle it to disk."""

    if subtensor is None:
        # Each worker creates its own connection; subtensor objects cannot be shared across processes.
        subtensor = bittensor.subtensor(network='finney')

    try:
        metagraph = subtensor.metagraph(block=block, netuid=netuid, lite=lite)
        if difficulty:
            metagraph.difficulty = subtensor.difficulty(block=block, netuid=netuid)

        if not lite:
            if half:
                # Store weights in float16 to roughly halve the size of the pickle.
                metagraph.weights = torch.nn.Parameter(metagraph.weights.half(), requires_grad=False)
            if prune_weights:
                # Drop rows for neurons that did not set any weights.
                metagraph.weights = metagraph.weights[metagraph.weights.sum(dim=1) > 0]

        with open(f'data/metagraph/{netuid}/{block}.pkl', 'wb') as f:
            pickle.dump(metagraph, f)

        return metagraph if return_graph else True

    except Exception as e:
        print(f'Error processing block {block}: {e}')
        return None

def parse_arguments():
    parser = argparse.ArgumentParser(description='Process metagraphs for a given network.')
    parser.add_argument('--netuid', type=int, default=1, help='Network UID to use.')
    parser.add_argument('--difficulty', action='store_true', help='Include difficulty in metagraph.')
    parser.add_argument('--prune_weights', action='store_true', help='Prune weights in metagraph.')
    parser.add_argument('--return_graph', action='store_true', help='Return metagraph instead of True.')
    parser.add_argument('--max_workers', type=int, default=32, help='Max workers to use.')
    parser.add_argument('--start_block', type=int, default=1_000_000, help='Start block.')
    parser.add_argument('--end_block', type=int, default=600_000, help='End block.')
    parser.add_argument('--step_size', type=int, default=100, help='Step size.')
    return parser.parse_args()
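
# Example invocation (a sketch; the filename "multigraph.py" is an assumption, adjust to this script's actual name):
#   python multigraph.py --netuid 1 --start_block 1000000 --end_block 600000 --step_size 100 --max_workers 32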

if __name__ == '__main__':

    # Parse arguments first so that --help works without a network connection.
    args = parse_arguments()

    subtensor = bittensor.subtensor(network='finney')
    print(f'Current block: {subtensor.block}')
       
    netuid = args.netuid
    difficulty = args.difficulty
    prune_weights = args.prune_weights
    return_graph = args.return_graph
    overwrite = False

    step_size = args.step_size
    start_block = args.start_block
    start_block = (min(subtensor.block, start_block) // step_size) * step_size  # round down to a multiple of step_size
    end_block = args.end_block
    blocks = range(start_block, end_block, -step_size)  # walk backwards from start_block towards end_block

    # Only do a full (non-lite) sync, which includes the weight matrix, for blocks that are a multiple of 500.
    lite = lambda block: block % 500 != 0

    os.makedirs(f'data/metagraph/{netuid}', exist_ok=True)
    if not overwrite:
        # Skip blocks that already have a saved snapshot.
        blocks = [block for block in blocks if not os.path.exists(f'data/metagraph/{netuid}/{block}.pkl')]

    max_workers = min(args.max_workers, max(len(blocks), 1))

    metagraphs = []
    
    if len(blocks) == 0:
        print(f'No blocks to process. Current block: {subtensor.block}')
        sys.exit()
        
    print(f'Processing {len(blocks)} blocks from {blocks[0]}-{blocks[-1]} using {max_workers} workers.')
    
    with ProcessPoolExecutor(max_workers=max_workers) as executor:
        futures = [
            executor.submit(
                process, block,
                lite=lite(block), netuid=netuid, difficulty=difficulty,
                prune_weights=prune_weights, return_graph=return_graph,
            )
            for block in blocks
        ]

        success = 0
        with tqdm.tqdm(total=len(futures)) as pbar:
            for block, future in zip(blocks, futures):
                try:
                    result = future.result()
                    if result is not None:  # process() returns None when it fails
                        metagraphs.append(result)
                        success += 1
                except Exception:
                    print(f'Block {block} generated an exception:')
                    print_exc()
                pbar.update(1)
                pbar.set_description(f'Processed {success} blocks. Current block: {block}')

    if not success:
        raise ValueError('No blocks were successfully processed.')
    
    print(f'Processed {success} blocks.')
    if return_graph:
        for metagraph in metagraphs:
            print(f'{metagraph.block}: {metagraph.n.item()} nodes, difficulty={getattr(metagraph, "difficulty", None)}, weights={metagraph.weights.shape if hasattr(metagraph, "weights") else None}')

        print(metagraphs[-1])
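
# Reading a snapshot back (a minimal sketch, assuming the default netuid of 1 and the output path used above):
#   import pickle
#   with open('data/metagraph/1/1000000.pkl', 'rb') as f:
#       metagraph = pickle.load(f)
#   print(metagraph.block, metagraph.n.item())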