Stancld committed on
Commit
cff30bf
1 Parent(s): 452ea23

Fix untie word embeddings

Browse files
Files changed (1) hide show
  1. config.json +1 -0
config.json CHANGED
@@ -25,6 +25,7 @@
25
  "pad_token_id": 0,
26
  "relative_attention_max_distance": 128,
27
  "relative_attention_num_buckets": 32,
 
28
  "torch_dtype": "float32",
29
  "transformers_version": "4.19.0.dev0",
30
  "use_cache": true,
 
25
  "pad_token_id": 0,
26
  "relative_attention_max_distance": 128,
27
  "relative_attention_num_buckets": 32,
28
+ "tie_word_embeddings": false,
29
  "torch_dtype": "float32",
30
  "transformers_version": "4.19.0.dev0",
31
  "use_cache": true,