roberta-finetuned-stsbenchmark / tokenizer_config.json
{"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "trim_offsets": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "/Users/elsayedissa/.cache/torch/sentence_transformers/sentence-transformers_roberta-large-nli-stsb-mean-tokens/", "tokenizer_class": "RobertaTokenizer"}