_DESCRIPTION="""\ Dataset for storing training evaluations of pythia models, e.g. loss, perplexity """ import datasets import json class PythiaTrainingEvals(datasets.GeneratorBasedBuilder): MODEL_SIZES = [ "70m", "160m", "410m", "1.4b", "2.8b", ] BUILDER_CONFIGS = [] for model_size in MODEL_SIZES: BUILDER_CONFIGS.extend([ datasets.BuilderConfig( name=f"{model_size}", description=f"Dataset of pythia training evaluation metrics for pythia model size: {model_size}", version="1.0.0", ), ]) def _info(self): return datasets.DatasetInfo( description=_DESCRIPTION, ) def _split_generators(self, dl_manager: datasets.DownloadManager): """ Returns data for different splits - we define a split as a model size. """ checkpoint_steps = [0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1000, ] checkpoint_steps.extend([3000 + (i * 10000) for i in range(0, 15)]) to_download_files = [] model_size = self.config.name.split("__")[0] for checkpoint_step in checkpoint_steps: to_download_files.append(f"./models/{model_size}/checkpoint_{checkpoint_step}/evals.json") downloaded_files = dl_manager.download_and_extract(to_download_files) return [ datasets.SplitGenerator( name='default', gen_kwargs={ "filepaths": downloaded_files, } ) ] def _generate_examples(self, filepaths): """ Yields examples from each file in filepaths that are stored as jsons with the evaluation metrics for a given checkpoint step. """ checkpoint_steps = [0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1000, ] checkpoint_steps.extend([3000 + (i * 10000) for i in range(0, 15)]) # the filepaths should be a list of filepaths if isinstance(filepaths, str): filepaths = [filepaths] for idx, filepath in enumerate(filepaths): with open(filepath, 'rb') as f: data = json.load(f) record = { "checkpoint_step": checkpoint_steps[idx], **data } yield idx, record