Pengcheng He committed on
Commit b2a877b
1 Parent(s): d21e12b

DeBERTa V2 XLarge 900M model

Files changed (1):
config.json +0 -1
config.json CHANGED
@@ -19,7 +19,6 @@
   "num_attention_heads": 24,
   "attention_head_size": 64,
   "num_hidden_layers": 24,
-  "self_attention_type": "dense_att",
   "type_vocab_size": 0,
   "vocab_size": 128100
 }
 
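For reference, the fields kept after this change are standard DebertaV2Config parameters and can be inspected with the transformers library. A minimal sketch, assuming the checkpoint is published under the hub id microsoft/deberta-v2-xlarge (the repository path is not stated in this commit):

# Minimal sketch: read the cleaned config with Hugging Face transformers.
# The hub id below is an assumption; this commit does not name the repository path.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("microsoft/deberta-v2-xlarge")

# Fields retained by this commit's config.json:
print(config.num_attention_heads)  # 24
print(config.num_hidden_layers)    # 24
print(config.type_vocab_size)      # 0
print(config.vocab_size)           # 128100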