{
  "model_max_length": 512,
  "name_or_path": "/global/scratch/users/aniketh/PromoGen/SentencePieceUnigramTokenizer_4096_min_exp_2_fast",
  "special_tokens": [
    "<BOS>",
    "<EOS>",
    "<PAD>",
    "<UNK>",
    "<CLS>",
    "<SEP>",
    "<MASK>"
  ],
  "special_tokens_map_file": "/global/scratch/users/aniketh/PromoGen/SentencePieceUnigramTokenizer_4096_min_exp_2_fast/special_tokens_map.json",
  "tokenizer_class": "PreTrainedTokenizerFast"
}
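
A minimal sketch of loading a tokenizer described by this config with the Hugging Face `transformers` library. The load path below is simply the `name_or_path` from the config and is an assumption; in practice it would be replaced by the Hub repository id or local directory that actually contains the tokenizer files, and the example input sequence is purely illustrative.

```python
# Minimal sketch: load the fast tokenizer that this config describes.
# The path is taken from "name_or_path" above and is an assumption; replace it
# with the Hub repo id or local directory holding the tokenizer files.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "/global/scratch/users/aniketh/PromoGen/SentencePieceUnigramTokenizer_4096_min_exp_2_fast"
)

print(tokenizer.model_max_length)    # 512, per "model_max_length" in the config
print(tokenizer.all_special_tokens)  # the <BOS>/<EOS>/<PAD>/<UNK>/<CLS>/<SEP>/<MASK> tokens

# Encode an illustrative sequence, truncating to the configured maximum length.
encoding = tokenizer("ACGTACGTACGT", truncation=True, max_length=tokenizer.model_max_length)
print(encoding["input_ids"])
```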