{
  "add_bos_token": false,
  "add_prefix_space": false,
  "bos_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "errors": "replace",
  "name_or_path": "hadidev/gpt2-urdu-tokenizer",
  "pad_token": null,
  "special_tokens_map_file": "/root/.cache/huggingface/transformers/fd05696fd7c524ed400d964c4d1fa66c6435bc0d588c55f7ac98c7c850c7cc5a.b7f8742f1d370b815979aeabc401aed45c79760724667d2725ac7503c242b97f",
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}
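
A minimal usage sketch for this config, assuming the Hugging Face transformers library is installed; the repo id comes from "name_or_path" above, while the sample sentence and the pad-token workaround are illustrative assumptions, not part of the config itself:

from transformers import GPT2Tokenizer

# Load the tokenizer this config describes (repo id taken from "name_or_path").
tokenizer = GPT2Tokenizer.from_pretrained("hadidev/gpt2-urdu-tokenizer")

# "pad_token" is null in the config, so batched encoding with padding would fail;
# a common workaround is to reuse the EOS token ("<|endoftext|>") for padding.
tokenizer.pad_token = tokenizer.eos_token

# Hypothetical Urdu sample sentence ("This is an example"), just for illustration.
ids = tokenizer("یہ ایک مثال ہے")["input_ids"]
print(ids)
print(tokenizer.decode(ids))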