{ "_name_or_path": "FacebookAI/xlm-roberta-large", "architectures": [ "XLMRobertaForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 1024, "id2label": { "0": "O", "1": "S\u1ed1 \u0111i\u1ec7n tho\u1ea1i nh\u1eadn h\u00e0ng", "2": "\u0110\u1ecba ch\u1ec9 nh\u1eadn h\u00e0ng", "3": "\u0110\u1ed9 d\u00e0i t\u00f3c", "4": "\u0110\u01a1n v\u1ecb s\u1ea3n ph\u1ea9m", "5": "S\u1ea3n ph\u1ea9m", "6": "Cung m\u1ec7nh", "7": "Chi\u1ec1u cao kh\u00e1ch h\u00e0ng", "8": "C\u00e2n n\u1eb7ng kh\u00e1ch h\u00e0ng", "9": "M\u00e3 s\u1ea3n ph\u1ea9m", "10": "M\u00e0u s\u1eafc s\u1ea3n ph\u1ea9m", "11": "Gi\u00e1 s\u1ea3n ph\u1ea9m", "12": "S\u1ed1 l\u01b0\u1ee3ng s\u1ea3n ph\u1ea9m", "13": "Kh\u1ed1i l\u01b0\u1ee3ng s\u1ea3n ph\u1ea9m", "14": "D\u00e1ng khu\u00f4n m\u1eb7t", "15": "D\u00e1ng ng\u01b0\u1eddi", "16": "S\u1ed1 \u0111o v\u00f2ng 2", "17": "S\u1ed1 \u0111o v\u00f2ng 3", "18": "S\u1ed1 \u0111o v\u00f2ng 1", "19": "Chi\u1ec1u d\u00e0i s\u1ea3n ph\u1ea9m", "20": "Chi\u1ec1u r\u1ed9ng s\u1ea3n ph\u1ea9m", "21": "Th\u1eddi gian nh\u1eadn h\u00e0ng", "22": "M\u00e0u da", "23": "Ch\u1ea5t t\u00f3c", "24": "\u0110\u1ed9 tu\u1ed5i kh\u00e1ch h\u00e0ng", "25": "Phong c\u00e1ch th\u1eddi trang", "26": "\u0110\u1eb7c \u0111i\u1ec3m kh\u00e1c c\u1ee7a da", "27": "Phong c\u00e1ch nh\u00e0 c\u1eeda", "28": "Cung ho\u00e0ng \u0111\u1ea1o", "29": "M\u1eadt \u0111\u1ed9 t\u00f3c", "30": "Phong c\u00e1ch mua s\u1eafm", "31": "Lo\u1ea1i da" }, "initializer_range": 0.02, "intermediate_size": 4096, "label2id": { "Chi\u1ec1u cao kh\u00e1ch h\u00e0ng": 7, "Chi\u1ec1u d\u00e0i s\u1ea3n ph\u1ea9m": 19, "Chi\u1ec1u r\u1ed9ng s\u1ea3n ph\u1ea9m": 20, "Ch\u1ea5t t\u00f3c": 23, "Cung ho\u00e0ng \u0111\u1ea1o": 28, "Cung m\u1ec7nh": 6, "C\u00e2n n\u1eb7ng kh\u00e1ch h\u00e0ng": 8, "D\u00e1ng khu\u00f4n m\u1eb7t": 14, "D\u00e1ng ng\u01b0\u1eddi": 15, "Gi\u00e1 s\u1ea3n ph\u1ea9m": 11, "Kh\u1ed1i l\u01b0\u1ee3ng s\u1ea3n ph\u1ea9m": 13, "Lo\u1ea1i da": 31, "M\u00e0u da": 22, "M\u00e0u s\u1eafc s\u1ea3n ph\u1ea9m": 10, "M\u00e3 s\u1ea3n ph\u1ea9m": 9, "M\u1eadt \u0111\u1ed9 t\u00f3c": 29, "O": 0, "Phong c\u00e1ch mua s\u1eafm": 30, "Phong c\u00e1ch nh\u00e0 c\u1eeda": 27, "Phong c\u00e1ch th\u1eddi trang": 25, "S\u1ea3n ph\u1ea9m": 5, "S\u1ed1 l\u01b0\u1ee3ng s\u1ea3n ph\u1ea9m": 12, "S\u1ed1 \u0111i\u1ec7n tho\u1ea1i nh\u1eadn h\u00e0ng": 1, "S\u1ed1 \u0111o v\u00f2ng 1": 18, "S\u1ed1 \u0111o v\u00f2ng 2": 16, "S\u1ed1 \u0111o v\u00f2ng 3": 17, "Th\u1eddi gian nh\u1eadn h\u00e0ng": 21, "\u0110\u01a1n v\u1ecb s\u1ea3n ph\u1ea9m": 4, "\u0110\u1eb7c \u0111i\u1ec3m kh\u00e1c c\u1ee7a da": 26, "\u0110\u1ecba ch\u1ec9 nh\u1eadn h\u00e0ng": 2, "\u0110\u1ed9 d\u00e0i t\u00f3c": 3, "\u0110\u1ed9 tu\u1ed5i kh\u00e1ch h\u00e0ng": 24 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "xlm-roberta", "num_attention_heads": 16, "num_hidden_layers": 24, "output_past": true, "pad_token_id": 1, "position_embedding_type": "absolute", "torch_dtype": "float32", "transformers_version": "4.44.0", "type_vocab_size": 1, "use_cache": true, "vocab_size": 250002 }