nbroad (HF staff) committed
Commit 64b8eb1
Parent: 516ecec

add label2id

Files changed (1):
config.json +10 -3
config.json CHANGED

@@ -1,7 +1,7 @@
 {
-  "_name_or_path": "state_dict.pt",
+  "_name_or_path": "roberta-base",
   "architectures": [
-    "RobertaForSequenceClassification"
+    "RobertaForMaskedLM"
   ],
   "attention_probs_dropout_prob": 0.1,
   "bos_token_id": 0,
@@ -10,8 +10,16 @@
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
+  "id2label": {
+    "0": "Fake",
+    "1": "Real"
+  },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
+  "label2id": {
+    "Fake": 0,
+    "Real": 1
+  },
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,
   "model_type": "roberta",
@@ -19,7 +27,6 @@
   "num_hidden_layers": 12,
   "pad_token_id": 1,
   "position_embedding_type": "absolute",
-  "torch_dtype": "float32",
   "transformers_version": "4.24.0",
   "type_vocab_size": 1,
   "use_cache": true,