{
  "_name_or_path": "FacebookAI/xlm-roberta-large",
  "architectures": [
    "XLMRobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "0": "O",
    "1": "Gi\u1edbi t\u00ednh",
    "2": "Phong c\u00e1ch",
    "3": "Lo\u1ea1i ph\u1ee5 ki\u1ec7n \u0111i\u1ec7n t\u1eed",
    "4": "Lo\u1ea1i \u0111\u1ed3 v\u1eadt s\u01b0u t\u1ea7m",
    "5": "M\u00f9i h\u01b0\u01a1ng",
    "6": "Ch\u1ee9c n\u0103ng h\u1ed7 tr\u1ee3 s\u1ee9c kh\u1ecfe",
    "7": "T\u00ecnh tr\u1ea1ng",
    "8": "C\u1ed5 \u00e1o",
    "9": "C\u00e2n n\u1eb7ng",
    "10": "M\u1eabu xe t\u01b0\u01a1ng th\u00edch",
    "11": "Lo\u1ea1i \u0111\u1ed3 gia d\u1ee5ng b\u1ebfp",
    "12": "Lo\u1ea1i \u1ed1p",
    "13": "H\u01b0\u01a1ng v\u1ecb",
    "14": "Lo\u1ea1i tr\u00e1i c\u00e2y",
    "15": "Th\u00e0nh Ph\u1ea7n Ho\u1ea1t T\u00ednh",
    "16": "Lo\u1ea1i \u0111\u1ed3 ch\u01a1i",
    "17": "Dung t\u00edch",
    "18": "Xu\u1ea5t x\u1ee9",
    "19": "T\u00edch h\u1ee3p ph\u1ee5 ki\u1ec7n",
    "20": "Lo\u1ea1i \u0111\u1ed3 d\u00f9ng ph\u00f2ng ng\u1ee7",
    "21": "Lo\u1ea1i d\u1ee5ng c\u1ee5 & thi\u1ebft b\u1ecb s\u1eeda ch\u1eefa",
    "22": "Lo\u1ea1i v\u1eadt d\u1ee5ng l\u00e0m v\u01b0\u1eddn",
    "23": "Ch\u1ee9c n\u0103ng l\u00e0m \u0111\u1eb9p",
    "24": "Lo\u1ea1i \u0111\u1ed3 d\u00f9ng ch\u0103m s\u00f3c b\u00e9",
    "25": "Tr\u1ecdng l\u01b0\u1ee3ng",
    "26": "Thi\u1ebft b\u1ecb t\u01b0\u01a1ng th\u00edch",
    "27": "H\u00ecnh th\u1ee9c s\u1ea3n ph\u1ea9m",
    "28": "Lo\u1ea1i thi\u1ebft b\u1ecb v\u00e0 d\u1ee5ng c\u1ee5 l\u00e0m \u0111\u1eb9p",
    "29": "Lo\u1ea1i trang ph\u1ee5c",
    "30": "Th\u00e0nh ph\u1ea7n",
    "31": "Ch\u1ea5t li\u1ec7u",
    "32": "Lo\u1ea1i \u0111\u1ed3 d\u00f9ng v\u0103n ph\u00f2ng ph\u1ea9m",
    "33": "Lo\u1ea1i ph\u1ee5 ki\u1ec7n nh\u00e0 b\u1ebfp",
    "34": "Lo\u1ea1i thi\u1ebft b\u1ecb v\u00e0 linh ki\u1ec7n \u0111i\u1ec7n t\u1eed",
    "35": "Lo\u1ea1i trang s\u1ee9c / ph\u1ee5 ki\u1ec7n",
    "36": "Size",
    "37": "Lo\u1ea1i th\u1ee9c \u0103n ch\u1ebf bi\u1ebfn s\u1eb5n",
    "38": "Th\u1ec3 lo\u1ea1i s\u00e1ch truy\u1ec7n",
    "39": "Lo\u1ea1i v\u1eadt d\u1ee5ng ch\u0103m s\u00f3c c\u00e1 nh\u00e2n",
    "40": "T\u00ednh n\u0103ng",
    "41": "Th\u01b0\u01a1ng hi\u1ec7u",
    "42": "Ki\u1ec3u \u0111\u00f3ng g\u00f3i",
    "43": "Lo\u1ea1i thu\u1ed1c & th\u1ef1c ph\u1ea9m ch\u1ee9c n\u0103ng",
    "44": "Lo\u1ea1i ph\u1ee5 ki\u1ec7n / linh ki\u1ec7n xe",
    "45": "Lo\u1ea1i v\u1eadt d\u1ee5ng trong nh\u00e0 kh\u00e1c",
    "46": "M\u00e0u s\u1eafc",
    "47": "Lo\u1ea1i th\u1ef1c ph\u1ea9m",
    "48": "Lo\u1ea1i gi\u1ed1ng",
    "49": "M\u1eabu",
    "50": "Lo\u1ea1i s\u1ea3n ph\u1ea9m l\u00e0m \u0111\u1eb9p"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "Ch\u1ea5t li\u1ec7u": 31,
    "Ch\u1ee9c n\u0103ng h\u1ed7 tr\u1ee3 s\u1ee9c kh\u1ecfe": 6,
    "Ch\u1ee9c n\u0103ng l\u00e0m \u0111\u1eb9p": 23,
    "C\u00e2n n\u1eb7ng": 9,
    "C\u1ed5 \u00e1o": 8,
    "Dung t\u00edch": 17,
    "Gi\u1edbi t\u00ednh": 1,
    "H\u00ecnh th\u1ee9c s\u1ea3n ph\u1ea9m": 27,
    "H\u01b0\u01a1ng v\u1ecb": 13,
    "Ki\u1ec3u \u0111\u00f3ng g\u00f3i": 42,
    "Lo\u1ea1i d\u1ee5ng c\u1ee5 & thi\u1ebft b\u1ecb s\u1eeda ch\u1eefa": 21,
    "Lo\u1ea1i gi\u1ed1ng": 48,
    "Lo\u1ea1i ph\u1ee5 ki\u1ec7n / linh ki\u1ec7n xe": 44,
    "Lo\u1ea1i ph\u1ee5 ki\u1ec7n nh\u00e0 b\u1ebfp": 33,
    "Lo\u1ea1i ph\u1ee5 ki\u1ec7n \u0111i\u1ec7n t\u1eed": 3,
    "Lo\u1ea1i s\u1ea3n ph\u1ea9m l\u00e0m \u0111\u1eb9p": 50,
    "Lo\u1ea1i thi\u1ebft b\u1ecb v\u00e0 d\u1ee5ng c\u1ee5 l\u00e0m \u0111\u1eb9p": 28,
    "Lo\u1ea1i thi\u1ebft b\u1ecb v\u00e0 linh ki\u1ec7n \u0111i\u1ec7n t\u1eed": 34,
    "Lo\u1ea1i thu\u1ed1c & th\u1ef1c ph\u1ea9m ch\u1ee9c n\u0103ng": 43,
    "Lo\u1ea1i th\u1ee9c \u0103n ch\u1ebf bi\u1ebfn s\u1eb5n": 37,
    "Lo\u1ea1i th\u1ef1c ph\u1ea9m": 47,
    "Lo\u1ea1i trang ph\u1ee5c": 29,
    "Lo\u1ea1i trang s\u1ee9c / ph\u1ee5 ki\u1ec7n": 35,
    "Lo\u1ea1i tr\u00e1i c\u00e2y": 14,
    "Lo\u1ea1i v\u1eadt d\u1ee5ng ch\u0103m s\u00f3c c\u00e1 nh\u00e2n": 39,
    "Lo\u1ea1i v\u1eadt d\u1ee5ng l\u00e0m v\u01b0\u1eddn": 22,
    "Lo\u1ea1i v\u1eadt d\u1ee5ng trong nh\u00e0 kh\u00e1c": 45,
    "Lo\u1ea1i \u0111\u1ed3 ch\u01a1i": 16,
    "Lo\u1ea1i \u0111\u1ed3 d\u00f9ng ch\u0103m s\u00f3c b\u00e9": 24,
    "Lo\u1ea1i \u0111\u1ed3 d\u00f9ng ph\u00f2ng ng\u1ee7": 20,
    "Lo\u1ea1i \u0111\u1ed3 d\u00f9ng v\u0103n ph\u00f2ng ph\u1ea9m": 32,
    "Lo\u1ea1i \u0111\u1ed3 gia d\u1ee5ng b\u1ebfp": 11,
    "Lo\u1ea1i \u0111\u1ed3 v\u1eadt s\u01b0u t\u1ea7m": 4,
    "Lo\u1ea1i \u1ed1p": 12,
    "M\u00e0u s\u1eafc": 46,
    "M\u00f9i h\u01b0\u01a1ng": 5,
    "M\u1eabu": 49,
    "M\u1eabu xe t\u01b0\u01a1ng th\u00edch": 10,
    "O": 0,
    "Phong c\u00e1ch": 2,
    "Size": 36,
    "Thi\u1ebft b\u1ecb t\u01b0\u01a1ng th\u00edch": 26,
    "Th\u00e0nh Ph\u1ea7n Ho\u1ea1t T\u00ednh": 15,
    "Th\u00e0nh ph\u1ea7n": 30,
    "Th\u01b0\u01a1ng hi\u1ec7u": 41,
    "Th\u1ec3 lo\u1ea1i s\u00e1ch truy\u1ec7n": 38,
    "Tr\u1ecdng l\u01b0\u1ee3ng": 25,
    "T\u00ecnh tr\u1ea1ng": 7,
    "T\u00edch h\u1ee3p ph\u1ee5 ki\u1ec7n": 19,
    "T\u00ednh n\u0103ng": 40,
    "Xu\u1ea5t x\u1ee9": 18
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "xlm-roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "output_past": true,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.44.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 250002
}