Committed by system (HF staff)
Commit 8f06ee5
1 Parent(s): a2836ea

Update config.json

Files changed (1):
  1. config.json +7 -4
config.json CHANGED
@@ -1,12 +1,15 @@
 {
+  "architectures": [
+    "BertForSequenceClassification"
+  ],
   "attention_probs_dropout_prob": 0.1,
-  "bos_token_id": 0,
+  "bos_token_id": null,
   "directionality": "bidi",
   "do_sample": false,
-  "eos_token_ids": 0,
+  "eos_token_ids": null,
   "finetuning_task": null,
   "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.1,
+  "hidden_dropout_prob": 0.3,
   "hidden_size": 768,
   "id2label": {
     "0": "LABEL_0",
@@ -32,7 +35,7 @@
   "output_attentions": false,
   "output_hidden_states": false,
   "output_past": true,
-  "pad_token_id": 0,
+  "pad_token_id": null,
   "pooler_fc_size": 768,
   "pooler_num_attention_heads": 12,
   "pooler_num_fc_layers": 3,
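
For context, below is a minimal sketch of how a config.json like the one edited here is consumed by the transformers library; the local file path and the model instantiation step are illustrative assumptions, not part of this commit.

# Load the updated config and build the architecture it names.
# Assumes the edited config.json is available locally; values shown in the
# comments mirror the diff above.
from transformers import BertConfig, BertForSequenceClassification

config = BertConfig.from_json_file("config.json")

print(config.architectures)        # ["BertForSequenceClassification"] (added in this commit)
print(config.hidden_dropout_prob)  # 0.3 after this commit (was 0.1)

# Instantiate the architecture named in the config with freshly initialised
# weights; from_pretrained on the model repo would also load the checkpoint.
model = BertForSequenceClassification(config)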