{
  "_name_or_path": "FacebookAI/xlm-roberta-base",
  "architectures": [
    "ZettHypernet"
  ],
  "attention_probs_dropout_prob": 0.1,
  "auto_map": {
    "AutoConfig": "configuration_hypernet.ZettHypernetConfig",
    "AutoModel": "modeling_hypernet.ZettHypernet"
  },
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "hn_add_inter_token_attention": false,
  "hn_concat_last_hidden_state": false,
  "hn_embed_lang_id": true,
  "hn_embed_target_priors": false,
  "hn_embed_using_source_embeddings": true,
  "hn_hidden_size": 768,
  "hn_inter_token_attention_bias_by_priors": true,
  "hn_inter_token_attention_bias_scaler": 1.0,
  "hn_intermediate_size": 1536,
  "hn_language_adapter_bottleneck_dim": 0,
  "hn_model_name_or_path": "roberta-base",
  "hn_model_type": "roberta",
  "hn_n_extra_tokens": 161,
  "hn_n_inter_token_blocks": 16,
  "hn_n_layers": 3,
  "hn_num_attention_heads": 12,
  "hn_predict_bias": true,
  "hn_rescale_embeddings": true,
  "hn_single_head": false,
  "hn_surface_maxlen": 7,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "langs": [
    "en",
    "ru",
    "de",
    "es",
    "fr",
    "it",
    "pt",
    "el",
    "ko",
    "fi",
    "id",
    "tr",
    "ar",
    "vi",
    "bg",
    "ca",
    "hi",
    "et",
    "bn",
    "ta",
    "ur",
    "sw",
    "te",
    "eu",
    "ht",
    "qu"
  ],
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "n_embd": 768,
  "n_langs": 26,
  "name": "v7:xlmr:multilingual_long:lw=0.5_26l",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "original_vocab_size": 250002,
  "output_past": true,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "separate_out_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.39.0.dev0",
  "type_vocab_size": 1,
  "use_cache": true,
  "use_unigram_bias": true,
  "vocab_size": 32896,
  "wandb_run_id": "eyql5ryv"
}