antypasd committed on
Commit
063bcf5
1 Parent(s): 4dd8f60

Upload model

Files changed (2)
  1. config.json +232 -0
  2. pytorch_model.bin +3 -0
config.json ADDED
@@ -0,0 +1,232 @@
+ {
+ "_name_or_path": "../../best_models/troberta-base-tweet-emoji/best_model/",
+ "architectures": [
+ "RobertaModel"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "bos_token_id": 0,
+ "classifier_dropout": null,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "\ud83d\ude02",
+ "1": "\ud83d\ude2d",
+ "2": "\u2764",
+ "3": "\ud83e\udd23",
+ "4": "\ud83d\udd25",
+ "5": "\ud83d\udfe9",
+ "6": "\ud83e\udd7a",
+ "7": "\ud83d\ude4f",
+ "8": "\ud83d\ude0d",
+ "9": "\ud83e\udd70",
+ "10": "\ud83e\udd14",
+ "11": "\ud83d\ude29",
+ "12": "\ud83d\udc4d",
+ "13": "\ud83d\ude44",
+ "14": "\ud83d\ude0a",
+ "15": "\ud83d\udc80",
+ "16": "\ud83d\udc4f",
+ "17": "\u2728",
+ "18": "\ud83d\ude05",
+ "19": "\ud83d\udc40",
+ "20": "\ud83d\udc95",
+ "21": "\ud83d\ude01",
+ "22": "\ud83d\udcaf",
+ "23": "\ud83d\ude80",
+ "24": "\ud83d\ude14",
+ "25": "\ud83d\ude18",
+ "26": "\ud83d\udd39",
+ "27": "\ud83d\udc96",
+ "28": "\ud83d\ude09",
+ "29": "\ud83d\udc94",
+ "30": "\ud83d\ude4c",
+ "31": "\ud83d\ude0e",
+ "32": "\u2705",
+ "33": "\ud83d\ude33",
+ "34": "\ud83d\ude06",
+ "35": "\ud83e\udd74",
+ "36": "\ud83e\udd72",
+ "37": "\ud83d\udc97",
+ "38": "\ud83d\ude0c",
+ "39": "\ud83e\udd17",
+ "40": "\ud83d\udc47",
+ "41": "\ud83c\udf89",
+ "42": "\ud83d\ude43",
+ "43": "\u263a",
+ "44": "\ud83e\udd21",
+ "45": "\ud83e\udd29",
+ "46": "\ud83e\udd73",
+ "47": "\ud83d\udc4c",
+ "48": "\ud83d\ude42",
+ "49": "\ud83d\ude21",
+ "50": "\ud83d\ude12",
+ "51": "\ud83d\ude00",
+ "52": "\ud83d\ude22",
+ "53": "\ud83e\udd26\u200d\u2642\ufe0f",
+ "54": "\ud83d\ude0f",
+ "55": "\ud83d\udea8",
+ "56": "\ud83d\ude2c",
+ "57": "\ud83d\udc49",
+ "58": "\ud83d\udcaa",
+ "59": "\ud83c\udfb6",
+ "60": "\ud83d\ude24",
+ "61": "\ud83d\ude08",
+ "62": "\ud83d\ude0b",
+ "63": "\ud83e\udd2a",
+ "64": "\ud83d\udc9e",
+ "65": "\ud83d\ude2b",
+ "66": "\ud83d\ude04",
+ "67": "\ud83e\udd2d",
+ "68": "\ud83e\udd79",
+ "69": "\ud83d\udcb0",
+ "70": "\ud83d\ude03",
+ "71": "\ud83d\ude1c",
+ "72": "\ud83d\udca5",
+ "73": "\ud83e\udd2c",
+ "74": "\ud83d\ude10",
+ "75": "\u270c",
+ "76": "\ud83e\udd24",
+ "77": "\ud83d\ude48",
+ "78": "\ud83e\udd28",
+ "79": "\ud83c\udf39",
+ "80": "\ud83d\ude1e",
+ "81": "\ud83e\udd27",
+ "82": "\ud83d\ude39",
+ "83": "\u2639",
+ "84": "\ud83d\udc93",
+ "85": "\ud83e\udd75",
+ "86": "\ud83d\ude31",
+ "87": "\ud83d\udc8e",
+ "88": "\ud83d\udca6",
+ "89": "\ud83e\udd2e",
+ "90": "\ud83e\udd1d",
+ "91": "\ud83d\udde3",
+ "92": "\ud83d\ude07",
+ "93": "\ud83e\udd37\u200d\u2642\ufe0f",
+ "94": "\ud83d\udc8b",
+ "95": "\ud83e\udd1e",
+ "96": "\ud83d\udea9",
+ "97": "\ud83e\udd2f",
+ "98": "\ud83c\udfc6",
+ "99": "\ud83d\ude11"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "label2id": {
+ "\u2639": 83,
+ "\u263a": 43,
+ "\u2705": 32,
+ "\u270c": 75,
+ "\u2728": 17,
+ "\u2764": 2,
+ "\ud83c\udf39": 79,
+ "\ud83c\udf89": 41,
+ "\ud83c\udfb6": 59,
+ "\ud83c\udfc6": 98,
+ "\ud83d\udc40": 19,
+ "\ud83d\udc47": 40,
+ "\ud83d\udc49": 57,
+ "\ud83d\udc4c": 47,
+ "\ud83d\udc4d": 12,
+ "\ud83d\udc4f": 16,
+ "\ud83d\udc80": 15,
+ "\ud83d\udc8b": 94,
+ "\ud83d\udc8e": 87,
+ "\ud83d\udc93": 84,
+ "\ud83d\udc94": 29,
+ "\ud83d\udc95": 20,
+ "\ud83d\udc96": 27,
+ "\ud83d\udc97": 37,
+ "\ud83d\udc9e": 64,
+ "\ud83d\udca5": 72,
+ "\ud83d\udca6": 88,
+ "\ud83d\udcaa": 58,
+ "\ud83d\udcaf": 22,
+ "\ud83d\udcb0": 69,
+ "\ud83d\udd25": 4,
+ "\ud83d\udd39": 26,
+ "\ud83d\udde3": 91,
+ "\ud83d\ude00": 51,
+ "\ud83d\ude01": 21,
+ "\ud83d\ude02": 0,
+ "\ud83d\ude03": 70,
+ "\ud83d\ude04": 66,
+ "\ud83d\ude05": 18,
+ "\ud83d\ude06": 34,
+ "\ud83d\ude07": 92,
+ "\ud83d\ude08": 61,
+ "\ud83d\ude09": 28,
+ "\ud83d\ude0a": 14,
+ "\ud83d\ude0b": 62,
+ "\ud83d\ude0c": 38,
+ "\ud83d\ude0d": 8,
+ "\ud83d\ude0e": 31,
+ "\ud83d\ude0f": 54,
+ "\ud83d\ude10": 74,
+ "\ud83d\ude11": 99,
+ "\ud83d\ude12": 50,
+ "\ud83d\ude14": 24,
+ "\ud83d\ude18": 25,
+ "\ud83d\ude1c": 71,
+ "\ud83d\ude1e": 80,
+ "\ud83d\ude21": 49,
+ "\ud83d\ude22": 52,
+ "\ud83d\ude24": 60,
+ "\ud83d\ude29": 11,
+ "\ud83d\ude2b": 65,
+ "\ud83d\ude2c": 56,
+ "\ud83d\ude2d": 1,
+ "\ud83d\ude31": 86,
+ "\ud83d\ude33": 33,
+ "\ud83d\ude39": 82,
+ "\ud83d\ude42": 48,
+ "\ud83d\ude43": 42,
+ "\ud83d\ude44": 13,
+ "\ud83d\ude48": 77,
+ "\ud83d\ude4c": 30,
+ "\ud83d\ude4f": 7,
+ "\ud83d\ude80": 23,
+ "\ud83d\udea8": 55,
+ "\ud83d\udea9": 96,
+ "\ud83d\udfe9": 5,
+ "\ud83e\udd14": 10,
+ "\ud83e\udd17": 39,
+ "\ud83e\udd1d": 90,
+ "\ud83e\udd1e": 95,
+ "\ud83e\udd21": 44,
+ "\ud83e\udd23": 3,
+ "\ud83e\udd24": 76,
+ "\ud83e\udd26\u200d\u2642\ufe0f": 53,
+ "\ud83e\udd27": 81,
+ "\ud83e\udd28": 78,
+ "\ud83e\udd29": 45,
+ "\ud83e\udd2a": 63,
+ "\ud83e\udd2c": 73,
+ "\ud83e\udd2d": 67,
+ "\ud83e\udd2e": 89,
+ "\ud83e\udd2f": 97,
+ "\ud83e\udd37\u200d\u2642\ufe0f": 93,
+ "\ud83e\udd70": 9,
+ "\ud83e\udd72": 36,
+ "\ud83e\udd73": 46,
+ "\ud83e\udd74": 35,
+ "\ud83e\udd75": 85,
+ "\ud83e\udd79": 68,
+ "\ud83e\udd7a": 6
+ },
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 514,
+ "model_type": "roberta",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "pad_token_id": 1,
+ "position_embedding_type": "absolute",
+ "problem_type": "single_label_classification",
+ "torch_dtype": "float32",
+ "transformers_version": "4.30.2",
+ "type_vocab_size": 1,
+ "use_cache": true,
+ "vocab_size": 50265
+ }
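The config above describes a 12-layer RoBERTa-base encoder (hidden size 768, 12 attention heads, vocab size 50265) fine-tuned as a single-label classifier over 100 emoji classes. As a minimal sketch of how this config could be inspected with transformers once the files are downloaded, assuming a hypothetical local directory containing the two files from this commit:

```python
# Minimal sketch: inspect the uploaded config with transformers.
# "./troberta-base-tweet-emoji" is a placeholder local directory that
# holds the config.json (and pytorch_model.bin) from this commit.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./troberta-base-tweet-emoji")

print(config.model_type)    # "roberta"
print(config.num_labels)    # 100 emoji classes, derived from id2label
print(config.id2label[0])   # emoji mapped to class id 0 (😂)
```

Given "problem_type": "single_label_classification" and the id2label/label2id maps, the checkpoint is presumably meant to be loaded as a sequence-classification model, even though the architectures field lists the bare RobertaModel.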
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:859752673575bbdd8cf84a8307ecad0bf25ba3ad770aab37370d8eca46553516
+ size 498650925
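pytorch_model.bin is stored via Git LFS, so what is committed here is only a pointer recording the SHA-256 digest and byte size of the actual weights. A minimal sketch for checking a downloaded copy against this pointer, assuming the file sits in the working directory:

```python
# Minimal sketch: verify a downloaded pytorch_model.bin against the
# Git LFS pointer above. The local path is an assumption.
import hashlib
import os

path = "pytorch_model.bin"  # placeholder: wherever the weights were downloaded
expected_sha256 = "859752673575bbdd8cf84a8307ecad0bf25ba3ad770aab37370d8eca46553516"
expected_size = 498650925  # bytes, from the pointer file

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert digest.hexdigest() == expected_sha256, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")
```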