{
"_name_or_path": "xlm-roberta-base",
"architectures": [
"XLMRobertaForSequenceClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"eos_token_id": 2,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "nl",
"1": "fr",
"2": "fi",
"3": "de",
"4": "en",
"5": "el",
"6": "hr",
"7": "ro",
"8": "pt",
"9": "es",
"10": "cs",
"11": "da",
"12": "it",
"13": "hu",
"14": "ar",
"15": "sv",
"16": "pl",
"17": "id",
"18": "ms",
"19": "bg",
"20": "pam",
"21": "th",
"22": "ko",
"23": "zh",
"24": "zh-Hant",
"25": "vi",
"26": "bs",
"27": "ur",
"28": "und",
"29": "lb",
"30": "mr",
"31": "haw",
"32": "te",
"33": "la",
"34": "ug",
"35": "az",
"36": "sk",
"37": "tr",
"38": "gn",
"39": "ja",
"40": "is",
"41": "af",
"42": "gu",
"43": "uz",
"44": "hi",
"45": "rw",
"46": "sq",
"47": "co",
"48": "sl",
"49": "kk",
"50": "ru",
"51": "uk",
"52": "tt",
"53": "ky",
"54": "mk",
"55": "sr",
"56": "mi",
"57": "ga",
"58": "ml",
"59": "st",
"60": "no",
"61": "ku",
"62": "fil",
"63": "sd",
"64": "yo",
"65": "fy",
"66": "tk",
"67": "ln",
"68": "et",
"69": "lv",
"70": "hy",
"71": "ka",
"72": "mn",
"73": "tg",
"74": "ca",
"75": "lt",
"76": "kn",
"77": "ps",
"78": "mg",
"79": "hmn",
"80": "ceb",
"81": "bn",
"82": "my",
"83": "gl"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"af": 41,
"ar": 14,
"az": 35,
"bg": 19,
"bn": 81,
"bs": 26,
"ca": 74,
"ceb": 80,
"co": 47,
"cs": 10,
"da": 11,
"de": 3,
"el": 5,
"en": 4,
"es": 9,
"et": 68,
"fi": 2,
"fil": 62,
"fr": 1,
"fy": 65,
"ga": 57,
"gl": 83,
"gn": 38,
"gu": 42,
"haw": 31,
"hi": 44,
"hmn": 79,
"hr": 6,
"hu": 13,
"hy": 70,
"id": 17,
"is": 40,
"it": 12,
"ja": 39,
"ka": 71,
"kk": 49,
"kn": 76,
"ko": 22,
"ku": 61,
"ky": 53,
"la": 33,
"lb": 29,
"ln": 67,
"lt": 75,
"lv": 69,
"mg": 78,
"mi": 56,
"mk": 54,
"ml": 58,
"mn": 72,
"mr": 30,
"ms": 18,
"my": 82,
"nl": 0,
"no": 60,
"pam": 20,
"pl": 16,
"ps": 77,
"pt": 8,
"ro": 7,
"ru": 50,
"rw": 45,
"sd": 63,
"sk": 36,
"sl": 48,
"sq": 46,
"sr": 55,
"st": 59,
"sv": 15,
"te": 32,
"tg": 73,
"th": 21,
"tk": 66,
"tr": 37,
"tt": 52,
"ug": 34,
"uk": 51,
"und": 28,
"ur": 27,
"uz": 43,
"vi": 25,
"yo": 64,
"zh": 23,
"zh-Hant": 24
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "xlm-roberta",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"output_past": true,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"problem_type": "single_label_classification",
"torch_dtype": "float32",
"transformers_version": "4.28.0",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 250002
}