andstor committed on
Commit
45b2a55
1 Parent(s): 43b9765

Fix wrong model_max_length

Browse files

The model has a context window of 2048 (`n_ctx`). The tokenizer should also support the same length.

Files changed (1) hide show
  1. tokenizer_config.json +1 -1
tokenizer_config.json CHANGED
@@ -2,7 +2,7 @@
2
  "add_prefix_space": false,
3
  "bos_token": "<|endoftext|>",
4
  "eos_token": "<|endoftext|>",
5
- "model_max_length": 1024,
6
  "name_or_path": "gpt2",
7
  "special_tokens_map_file": null,
8
  "tokenizer_class": "GPT2Tokenizer",
 
2
  "add_prefix_space": false,
3
  "bos_token": "<|endoftext|>",
4
  "eos_token": "<|endoftext|>",
5
+ "model_max_length": 2048,
6
  "name_or_path": "gpt2",
7
  "special_tokens_map_file": null,
8
  "tokenizer_class": "GPT2Tokenizer",