Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -22,7 +22,7 @@
  "intermediate_size": 3072,
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 8192,
- "model_max_length": 8192,
+ "model_max_length": 1024,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,