tiny_mixtral_ja_with_tokenizer / tokenizer_config.json
{
  "bos_token": "<BOS>",
  "eos_token": "<EOS>",
  "mask_token": "<MASK>",
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<PAD>",
  "unk_token": "<UNK>",
  "clean_up_tokenization_spaces": true,
  "tokenizer_class": "SentencePieceJA",
  "auto_map": {
    "AutoTokenizer": ["", "sentencepiece_ja.SentencePieceJA"]
  },
  "transformers_version": "4.34.1"
}
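
For reference, a minimal sketch of how a tokenizer_config.json with an "auto_map" entry like this one is typically consumed. The repository id below is an assumption based on the file path above; because "auto_map" points AutoTokenizer at a custom class (sentencepiece_ja.SentencePieceJA) defined inside the repo, loading requires trust_remote_code=True.

# Minimal loading sketch (repo id is assumed, not confirmed by the config itself)
from transformers import AutoTokenizer

repo_id = "hibikaze/tiny_mixtral_ja_with_tokenizer"  # assumed repository id

# trust_remote_code=True lets AutoTokenizer resolve the custom class named in auto_map
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)

# The special tokens come from this config: <BOS>, <EOS>, <PAD>, <UNK>, <MASK>
print(tokenizer.bos_token, tokenizer.eos_token)

# Tokenize a short Japanese sentence
ids = tokenizer("こんにちは、世界")["input_ids"]
print(ids)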