{
  "module": "keras_hub.src.models.gpt2.gpt2_tokenizer",
  "class_name": "GPT2Tokenizer",
  "config": {
    "name": "gpt2_tokenizer",
    "trainable": true,
    "dtype": {
      "module": "keras",
      "class_name": "DTypePolicy",
      "config": {
        "name": "int32"
      },
      "registered_name": null
    },
    "config_file": "tokenizer.json",
    "sequence_length": null,
    "add_prefix_space": false,
    "unsplittable_tokens": [
      "<|endoftext|>"
    ]
  },
  "registered_name": "keras_hub>GPT2Tokenizer"
}