{
  "clean_up_tokenization_spaces": true,
  "cls_token": "[CLS]",
  "mask_token": "[MASK]",
  "model_max_length": 512,
  "pad_token": "[PAD]",
  "sep_token": "[SEP]",
  "tokenizer_class": "PreTrainedTokenizerFast",
  "unk_token": "[UNK]",
  "vocab": "bert-uncased-tokenizer-danish/vocab.txt"
}
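
This config describes a fast (Rust-backed) BERT-style tokenizer with the usual special tokens and a 512-token limit. Below is a minimal sketch of loading and using it with Hugging Face transformers; the repository id "bert-uncased-tokenizer-danish" is an assumption inferred from the "vocab" path above, not confirmed by the file itself.

# Minimal sketch: loading the tokenizer described by this config.
# The repo id "bert-uncased-tokenizer-danish" is an assumption
# inferred from the "vocab" path in tokenizer_config.json.
from transformers import AutoTokenizer

# AutoTokenizer reads the config; since tokenizer_class is
# PreTrainedTokenizerFast, it returns the fast tokenizer.
tokenizer = AutoTokenizer.from_pretrained("bert-uncased-tokenizer-danish")

# Special tokens come from the config: [CLS], [SEP], [PAD], [MASK], [UNK].
encoded = tokenizer(
    "Dette er en test.",      # Danish example sentence
    padding="max_length",     # pads with [PAD] up to max_length
    truncation=True,          # truncates at model_max_length (512) or below
    max_length=16,
)
print(encoded["input_ids"])                    # [CLS] ... [SEP] then [PAD] ids
print(tokenizer.decode(encoded["input_ids"]))  # round-trip back to text

Because model_max_length is 512, any input longer than that is truncated when truncation is enabled; the pad_token entry is what padding="max_length" falls back on.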