biancaganescu committed
Commit f15154b · verified · 1 Parent(s): d7572df

Delete pythia-training-metrics.py

Files changed (1)
  1. pythia-training-metrics.py +0 -159
pythia-training-metrics.py DELETED
@@ -1,159 +0,0 @@
-import datasets
-import pickle
-
-_DESCRIPTION = """\
-Dataset for storing training metrics of pythia models
-"""
-
-class PythiaTrainingMetrics(datasets.GeneratorBasedBuilder):
-
-    MODEL_SIZES = [
-        "14m"
-    ]
-
-    _GRADIENTS_DESCRIPTION = """\
-    Dataset for storing gradients of pythia models of the requested model size
-    """
-
-    _WEIGHTS_DESCRIPTION = """\
-    Dataset for storing weights of pythia models of the requested model size
-    """
-
-    _GRADIENTS_MINI_DESCRIPTION = """\
-    Dataset for storing gradients of pythia models (limits the number of gradient steps per
-    checkpoint to only 2) of the requested model size
-    """
-
-    _ACTIVATIONS_DESCRIPTION = """\
-    Dataset for storing activations of pythia models of the requested model size
-    """
-
-    BUILDER_CONFIGS = []
-    for model_size in MODEL_SIZES:
-        BUILDER_CONFIGS.extend([
-            datasets.BuilderConfig(
-                name=f"{model_size}__gradients",
-                description=_GRADIENTS_DESCRIPTION,
-                version="1.0.0",
-            ),
-            datasets.BuilderConfig(
-                name=f"{model_size}__gradients_mini",
-                description=_GRADIENTS_MINI_DESCRIPTION,
-                version="1.0.0",
-            ),
-            datasets.BuilderConfig(
-                name=f"{model_size}__activations",
-                description=_ACTIVATIONS_DESCRIPTION,
-                version="1.0.0",
-            ),
-            datasets.BuilderConfig(
-                name=f"{model_size}__weights",
-                description=_WEIGHTS_DESCRIPTION,
-                version="1.0.0",
-            ),
-        ])
-
-    def _info(self):
-        """
-        NOTE: we might want to specify features, but since the features differ for each
-        model size it's annoying, and kind of pointless anyway since hf infers them automatically
-        """
-
-        return datasets.DatasetInfo(
-            description=_DESCRIPTION,
-        )
-
-
-    def _split_generators(self, dl_manager: datasets.DownloadManager):
-        """
-        Returns data for different splits - we define a split as a model size.
-        """
-
-        to_download_files = []
-
-        kwargs_checkpoint_steps = []
-        kwargs_gradient_steps = []
-
-        checkpoint_steps = [0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1000]
-        checkpoint_steps.extend([(i * 10000) for i in range(1, 15)])  # start at 1: step 0 is already in the list
-
-        def get_gradient_step(step: int):
-            """
-            Return a list of the gradient steps that are used at a given checkpoint step.
-            """
-            return list(range(max(0, step - 5), min(step + 6, 143_000)))
-
-        def get_gradient_mini_step(step: int):
-            """
-            Return a list of the gradient steps that are used at a given checkpoint step;
-            here we limit the number of gradient steps to only 2.
-            """
-            if step != checkpoint_steps[-1]:
-                return [step, step + 1]
-            else:
-                return [step - 2, step - 1]
-
-        model_size = self.config.name.split("__")[0]
-
-        for checkpoint_step in checkpoint_steps:
-
-            directory_path = f"./models/{model_size}/checkpoint_{checkpoint_step}"
-
-            if "activations" in self.config.name:
-                to_download_files.append(f"{directory_path}/checkpoint_activations.pickle")
-                kwargs_checkpoint_steps.append(checkpoint_step)
-            elif "weights" in self.config.name:
-                to_download_files.append(f"{directory_path}/checkpoint_weights.pickle")
-                kwargs_checkpoint_steps.append(checkpoint_step)
-            elif "gradients" in self.config.name:
-                if "mini" in self.config.name:
-                    gradient_steps = get_gradient_mini_step(checkpoint_step)
-                else:
-                    gradient_steps = get_gradient_step(checkpoint_step)
-
-                for gradient_step in gradient_steps:
-                    to_download_files.append(f"{directory_path}/checkpoint_gradients_{gradient_step}.pickle")
-                    kwargs_checkpoint_steps.append(checkpoint_step)
-                    kwargs_gradient_steps.append(gradient_step)
-            else:
-                raise ValueError(f"Invalid config name: {self.config.name}")
-
-        downloaded_files = dl_manager.download_and_extract(to_download_files)
-
-        return [
-            datasets.SplitGenerator(
-                name='default',
-                gen_kwargs={
-                    "filepaths": downloaded_files,
-                    "checkpoint_steps": kwargs_checkpoint_steps,
-                    **({"gradient_steps": kwargs_gradient_steps} if "gradients" in self.config.name else {}),
-                }
-            )
-        ]
-
-    def _generate_examples(self, filepaths, checkpoint_steps, **kwargs):
-
-        # the filepaths should be a list of filepaths
-        if isinstance(filepaths, str):
-            filepaths = [filepaths]
-
-        if "gradients" in self.config.name:
-            gradient_steps = kwargs["gradient_steps"]
-
-        global_idx = 0  # the unique identifier for the example
-
-        for idx, filepath in enumerate(filepaths):
-            with open(filepath, 'rb') as f:
-                data = pickle.load(f)
-
-            for layer_name, layer_data in data.items():
-                record = {
-                    "checkpoint_step": checkpoint_steps[idx],
-                    "layer_name": layer_name,
-                    "data": layer_data,
-                }
-                if "gradients" in self.config.name:
-                    record["gradient_step"] = gradient_steps[idx]
-
-                yield global_idx, record
-                global_idx += 1
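
For reference, a script-based loader like the one deleted above is consumed through datasets.load_dataset, with the config name selecting both the model size and the metric type. A minimal sketch, assuming a hypothetical repository path (the actual dataset repo id is not shown in this commit):

from datasets import load_dataset

# "user/pythia-training-metrics" is a placeholder repo id, not confirmed by this commit.
# Config names follow the "<model_size>__<metric>" pattern built in BUILDER_CONFIGS,
# e.g. "14m__weights", "14m__gradients", "14m__gradients_mini", "14m__activations".
ds = load_dataset(
    "user/pythia-training-metrics",
    name="14m__weights",
    trust_remote_code=True,  # script-based loaders require this in recent datasets releases
)

# Every record carries "checkpoint_step", "layer_name", and "data";
# the gradient configs additionally carry "gradient_step".
print(ds["default"][0]["layer_name"])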
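The gradient-window logic in _split_generators is easiest to check with concrete numbers. A standalone sketch of the two helpers as they appeared in the deleted script (143_000 matches the total Pythia training step count; 140_000 is the last checkpoint step the script builds, so it is an assumption passed in as a default here rather than read from checkpoint_steps):

# Full window: up to 11 gradient steps centered on the checkpoint, clipped to [0, 143_000).
def get_gradient_step(step: int) -> list:
    return list(range(max(0, step - 5), min(step + 6, 143_000)))

# Mini window: exactly 2 gradient steps per checkpoint, stepping backwards at the
# final checkpoint so the referenced files still exist.
def get_gradient_mini_step(step: int, last_step: int = 140_000) -> list:
    if step != last_step:
        return [step, step + 1]
    return [step - 2, step - 1]

print(get_gradient_step(0))             # [0, 1, 2, 3, 4, 5] - clipped at the left edge
print(get_gradient_step(1000))          # [995, ..., 1005] - the full 11-step window
print(get_gradient_mini_step(512))      # [512, 513]
print(get_gradient_mini_step(140_000))  # [139998, 139999]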