import pickle

import datasets

_DESCRIPTION = """\
Dataset for storing training metrics of pythia models
"""


class PythiaTrainingMetrics(datasets.GeneratorBasedBuilder):

    MODEL_SIZES = [
        "70m",
        "160m",
        # "410m",
        "1.4b",
        # "2.8b",
    ]

    _GRADIENTS_DESCRIPTION = """\
    Dataset for storing gradients of pythia models
    """

    _WEIGHTS_DESCRIPTION = """\
    Dataset for storing weights of pythia models
    """

    _GRADIENTS_MINI_DESCRIPTION = """\
    Dataset for storing gradients of pythia models, reduced to only 2 gradient
    steps per checkpoint
    """

    _ACTIVATIONS_DESCRIPTION = """\
    Dataset for storing activations of pythia models
    """

    BUILDER_CONFIGS = [
        datasets.BuilderConfig(
            name="gradients",
            description=_GRADIENTS_DESCRIPTION,
            version="1.0.0",
        ),
        datasets.BuilderConfig(
            name="gradients_mini",
            description=_GRADIENTS_MINI_DESCRIPTION,
            version="1.0.0",
        ),
        datasets.BuilderConfig(
            name="activations",
            description=_ACTIVATIONS_DESCRIPTION,
            version="1.0.0",
        ),
        datasets.BuilderConfig(
            name="weights",
            description=_WEIGHTS_DESCRIPTION,
            version="1.0.0",
        ),
    ]

    def _info(self):
        """
        NOTE: we could specify features explicitly, but since the features
        differ for each model size this is awkward and largely unnecessary;
        hf infers them automatically.
        """
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
        )

    def _split_generators(self, dl_manager: datasets.DownloadManager):
        """
        Returns data for the different splits; we define a split as a model size.
        """
        model_size_to_fp = {model_size: [] for model_size in self.MODEL_SIZES}

        checkpoint_steps = [0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1000]
        checkpoint_steps.extend([3000 + (i * 10000) for i in range(0, 15)])
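        # i.e. log-spaced checkpoints early in training (0, 1, 2, 4, ..., 1000)
        # followed by evenly spaced ones every 10,000 steps: 3000, 13000, ..., 143000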

        def get_gradient_step(step: int):
            """
            Return a list of the gradient steps that are used at a given
            checkpoint step.
            """
            return list(range(max(0, step - 5), min(step + 6, 143_000)))
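        # e.g. get_gradient_step(0) == [0, 1, 2, 3, 4, 5] and
        # get_gradient_step(1000) == [995, 996, ..., 1005]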

        for model_size in self.MODEL_SIZES:
            for checkpoint_step in checkpoint_steps:
                directory_path = f"./models/{model_size}/checkpoint_{checkpoint_step}"
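                # each checkpoint directory holds one pickle per artifact type:
                # activations, weights, or one file per stored gradient step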
                if self.config.name == "activations":
                    model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_activations.pickle")
                elif self.config.name == "weights":
                    model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_weights.pickle")
                elif self.config.name == "gradients":
                    for gradient_step in get_gradient_step(checkpoint_step):
                        model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_gradients_{gradient_step}.pickle")
                elif self.config.name == "gradients_mini":
                    # keep only the first 2 gradient steps per checkpoint
                    for gradient_step in get_gradient_step(checkpoint_step)[:2]:
                        model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_gradients_mini_{gradient_step}.pickle")
                else:
                    raise ValueError(f"Invalid config name: {self.config.name}")

        downloaded_files = dl_manager.download_and_extract(model_size_to_fp)

        return [
            datasets.SplitGenerator(
                name=model_size_name,
                gen_kwargs={"filepaths": downloaded_fps},
            )
            for model_size_name, downloaded_fps in downloaded_files.items()
        ]

    def _generate_examples(self, filepaths):
        # `filepaths` should be a list of filepaths, but a single filepath may
        # be passed in directly as a string
        if isinstance(filepaths, str):
            filepaths = [filepaths]

        global_idx = 0  # the unique identifier for each example

        for filepath in filepaths:
            with open(filepath, "rb") as f:
                data = pickle.load(f)

            # extract the checkpoint step from the filepath
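            # e.g. ".../checkpoint_3000/checkpoint_weights.pickle" -> 3000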
            checkpoint_step = int(filepath.split("/")[-2].split("_")[-1])

            if self.config.name in ["activations", "weights"]:
                for layer_name, layer_data in data.items():
                    yield global_idx, {
                        "checkpoint_step": checkpoint_step,
                        "layer_name": layer_name,
                        "data": layer_data,
                    }
                    global_idx += 1
            elif self.config.name in ["gradients", "gradients_mini"]:
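                # extract the gradient step from the file name,
                # e.g. "checkpoint_gradients_2998.pickle" -> 2998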
                gradient_step = int(filepath.split("/")[-1].split("_")[-1].split(".")[0])
                for layer_name, layer_data in data.items():
                    yield global_idx, {
                        "checkpoint_step": checkpoint_step,
                        "layer_name": layer_name,
                        "gradient_step": gradient_step,
                        "data": layer_data,
                    }
                    global_idx += 1
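

# A minimal usage sketch, assuming this script is saved as
# "pythia_training_metrics.py" alongside the "models/" directory that holds
# the pickled checkpoints (the filename is an assumption, and recent versions
# of `datasets` may also require trust_remote_code=True for script datasets):
#
#     from datasets import load_dataset
#
#     ds = load_dataset("pythia_training_metrics.py", name="weights", split="70m")
#     for example in ds:
#         print(example["checkpoint_step"], example["layer_name"])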