rdiehlmartinez committed
Commit 89bc030 · 1 Parent(s): cc1972d

adding first pass dataset loading script

Files changed (1)
  1. pythia_training_metrics.py +159 -0
pythia_training_metrics.py ADDED
@@ -0,0 +1,159 @@
+ import datasets
+ import pickle
+ 
+ 
+ _DESCRIPTION = """\
+ Dataset for storing training metrics of pythia models
+ """
+ 
+ 
+ class PythiaTrainingMetrics(datasets.GeneratorBasedBuilder):
+ 
+     MODEL_SIZES = [
+         "70m",
+         "160m",
+         "410m",
+         "1b",
+         "1.4b",
+         "2.8b",
+         "6.9b",
+     ]
+ 
+     _GRADIENTS_DESCRIPTION = """\
+     Dataset for storing gradients of pythia models
+     """
+ 
+     _WEIGHTS_DESCRIPTION = """\
+     Dataset for storing weights of pythia models
+     """
+ 
+     _GRADIENTS_MINI_DESCRIPTION = """\
+     Dataset for storing gradients of pythia models (minimizes the amount of
+     gradients per checkpoint to only 2)
+     """
+ 
+     _ACTIVATIONS_DESCRIPTION = """\
+     Dataset for storing activations of pythia models
+     """
+ 
+     BUILDER_CONFIGS = [
+         datasets.BuilderConfig(
+             name="gradients",
+             description=_GRADIENTS_DESCRIPTION,
+             version="1.0.0",
+         ),
+         datasets.BuilderConfig(
+             name="gradients_mini",
+             description=_GRADIENTS_MINI_DESCRIPTION,
+             version="1.0.0",
+         ),
+         datasets.BuilderConfig(
+             name="activations",
+             description=_ACTIVATIONS_DESCRIPTION,
+             version="1.0.0",
+         ),
+         datasets.BuilderConfig(
+             name="weights",
+             description=_WEIGHTS_DESCRIPTION,
+             version="1.0.0",
+         ),
+         # NOTE: first pass - the "all" config is not yet handled in _info()
+         # or _split_generators() below
+         datasets.BuilderConfig(
+             name="all",
+             description="All the metrics",
+             version="1.0.0",
+         ),
+     ]
+ 
+     def _info(self):
+         """
+         TODO: Got to figure out how to represent the features etc.
+ 
+         How do we do this if each feature is dependent on the model size?
+         """
+ 
+         features_dict = {
+             "checkpoint_step": datasets.Value("int32"),
+             "layer_name": datasets.Value("string"),
+         }
+ 
+         if self.config.name in ["activations", "weights"]:
+             features_dict["data"] = datasets.Sequence(datasets.Value("float32"))
+         elif self.config.name in ["gradients", "gradients_mini"]:
+             features_dict["gradient_step"] = datasets.Value("int32")
+             features_dict["gradient"] = datasets.Sequence(datasets.Value("float32"))
+ 
+         features = datasets.Features(features_dict)
+ 
+         return datasets.DatasetInfo(
+             description=_DESCRIPTION,
+             features=features,
+         )
+ 
+     def _split_generators(self, dl_manager: datasets.DownloadManager):
+         """
+         Returns data for the different splits - we define a split as a model size.
+         """
+ 
+         model_size_to_fp = {model_size: [] for model_size in self.MODEL_SIZES}
+ 
+         checkpoint_steps = [0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1000]
+         checkpoint_steps.extend([3000 + (i * 10000) for i in range(15)])
+ 
+         def get_gradient_steps(step: int):
+             """
+             Return a list of the gradient steps that are used at a given checkpoint step.
+             """
+             return list(range(max(0, step - 5), min(step + 6, 143_000)))
+ 
+         for model_size in self.MODEL_SIZES:
+             for checkpoint_step in checkpoint_steps:
+ 
+                 directory_path = f"./models/{model_size}/checkpoint_{checkpoint_step}"
+ 
+                 if self.config.name == "activations":
+                     model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_activations.pickle")
+                 elif self.config.name == "weights":
+                     model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_weights.pickle")
+                 elif self.config.name == "gradients":
+                     for gradient_step in get_gradient_steps(checkpoint_step):
+                         model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_gradients_{gradient_step}.pickle")
+                 elif self.config.name == "gradients_mini":
+                     # the "mini" variant keeps only the first 2 gradient steps per checkpoint
+                     for gradient_step in get_gradient_steps(checkpoint_step)[:2]:
+                         model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_gradients_mini_{gradient_step}.pickle")
+ 
+         downloaded_files = dl_manager.download_and_extract(model_size_to_fp)
+ 
+         # one split per model size; split names must be distinct, so use the
+         # model size as the name rather than naming every split TRAIN
+         return [
+             datasets.SplitGenerator(
+                 name=model_size,
+                 gen_kwargs={
+                     "filepaths": downloaded_fps,
+                 },
+             )
+             for model_size, downloaded_fps in downloaded_files.items()
+         ]
+ 
+     def _generate_examples(self, filepaths):
+ 
+         # the filepaths should be a list of filepaths
+         if isinstance(filepaths, str):
+             filepaths = [filepaths]
+ 
+         global_idx = 0  # the unique identifier for the example
+ 
+         for filepath in filepaths:
+             # pickle files are binary, so they must be opened in "rb" mode
+             with open(filepath, "rb") as f:
+                 data = pickle.load(f)
+ 
+             # extract the checkpoint step from the "checkpoint_{step}" directory
+             # component of the filepath
+             checkpoint_step = int(filepath.split("/")[-2].split("_")[-1])
+ 
+             if self.config.name in ["activations", "weights"]:
+                 for layer_name, layer_data in data.items():
+                     for layer_values in layer_data:
+                         yield global_idx, {"checkpoint_step": checkpoint_step, "layer_name": layer_name, "data": layer_values}
+                         global_idx += 1
+             elif self.config.name in ["gradients", "gradients_mini"]:
+                 for layer_name, layer_data in data.items():
+                     for gradient_step, gradient in layer_data.items():
+                         yield global_idx, {"checkpoint_step": checkpoint_step, "layer_name": layer_name, "gradient_step": gradient_step, "gradient": gradient}
+                         global_idx += 1
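
A minimal usage sketch for the script above, assuming it has been pushed to the Hub. The repo id below is a placeholder (substitute the dataset's actual Hub path); the config and split names follow BUILDER_CONFIGS and _split_generators as written, and newer versions of datasets may additionally require trust_remote_code=True for script-based datasets:

from datasets import load_dataset

# placeholder repo id - substitute the actual Hub path of this dataset
ds = load_dataset(
    "<user>/pythia_training_metrics",
    "weights",    # config: gradients | gradients_mini | activations | weights
    split="70m",  # splits are model sizes, per _split_generators
)

example = ds[0]  # one layer's values at one checkpoint step
print(example["checkpoint_step"], example["layer_name"], len(example["data"]))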