system HF staff committed on
Commit d6d2012
1 Parent(s): 40af04c

Update config.json

Files changed (1):
  config.json +27 -0
config.json ADDED
@@ -0,0 +1,27 @@
+ {
+   "architectures": [
+     "RobertaForMaskedLM"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "divide": "two_stage",
+   "finetuning_task": "mnli",
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "is_decoder": false,
+   "layer_norm_eps": 1e-05,
+   "max_position_embeddings": 514,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_labels": 3,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "output_past": true,
+   "pruned_heads": {},
+   "torchscript": false,
+   "type_vocab_size": 1,
+   "use_bfloat16": false,
+   "vocab_size": 50265
+ }
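
A minimal sketch of how the added file could be consumed with the Hugging Face transformers library, assuming transformers is installed and the file is saved locally as config.json (this snippet is not part of the commit):

    # Sketch only: load the config.json added in this commit and build a
    # randomly initialized model of the architecture it names.
    from transformers import RobertaConfig, RobertaForMaskedLM

    config = RobertaConfig.from_json_file("config.json")
    model = RobertaForMaskedLM(config)

    print(config.num_hidden_layers)  # 12
    print(config.finetuning_task)    # "mnli"

Loading via RobertaConfig keeps the standard fields (hidden_size, num_attention_heads, etc.) while non-standard keys such as "divide" are retained as extra attributes on the config object.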