system HF staff committed on
Commit a1d55b6
Parent: d5f24ab

Update config.json

Files changed (1):
  1. config.json (+2, -26)
config.json CHANGED
@@ -7,45 +7,26 @@
  "bos_index": 0,
  "bos_token_id": 0,
  "causal": false,
- "do_sample": false,
  "dropout": 0.1,
  "emb_dim": 512,
  "embed_init_std": 0.02209708691207961,
  "end_n_top": 5,
  "eos_index": 1,
- "finetuning_task": null,
  "gelu_activation": true,
- "id2label": {
-   "0": "LABEL_0",
-   "1": "LABEL_1"
- },
  "init_std": 0.02,
- "is_decoder": false,
  "is_encoder": true,
- "label2id": {
-   "LABEL_0": 0,
-   "LABEL_1": 1
- },
  "lang_id": 0,
  "layer_norm_eps": 1e-12,
- "length_penalty": 1.0,
  "mask_index": 5,
  "mask_token_id": 0,
- "max_length": 20,
  "max_position_embeddings": 512,
  "model_type": "xlm",
  "n_heads": 16,
  "n_langs": 1,
  "n_layers": 8,
- "num_beams": 1,
- "num_labels": 2,
- "num_return_sequences": 1,
- "output_attentions": false,
- "output_hidden_states": false,
  "output_past": true,
  "pad_index": 2,
- "pruned_heads": {},
- "repetition_penalty": 1.0,
+ "pad_token_id": 2,
  "sinusoidal_embeddings": false,
  "start_n_top": 5,
  "summary_activation": null,
@@ -53,12 +34,7 @@
  "summary_proj_to_labels": true,
  "summary_type": "first",
  "summary_use_proj": true,
- "temperature": 1.0,
- "top_k": 50,
- "top_p": 1.0,
- "torchscript": false,
  "unk_index": 3,
- "use_bfloat16": false,
  "use_lang_emb": true,
  "vocab_size": 30058
- }
+ }
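
A minimal sketch of how the updated config could be sanity-checked with the transformers library (assumed to be installed; "path/to/this/repo" is a placeholder, since the actual model id is not shown on this page):

from transformers import XLMConfig

# Load the edited config.json from a local checkout or model repo (placeholder path).
config = XLMConfig.from_pretrained("path/to/this/repo")

# The keys removed in this commit (do_sample, num_beams, temperature, top_k, ...)
# now fall back to the library's defaults instead of being pinned in the file,
# while the newly added pad_token_id is read directly from config.json.
print(config.model_type)     # xlm
print(config.pad_token_id)   # 2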