type: pharia
attention_bias: true
attention_dropout: 0.0
eos_token_id: 0
bos_token_id: 127179
pad_token_id: 1
hidden_act: gelu
hidden_size: 512
initializer_range: 0.02
intermediate_size: 1024
max_position_embeddings: 2048
mlp_bias: true
num_attention_heads: 8
num_hidden_layers: 6
num_key_value_heads: 8
rope_scaling: null
rope_theta: 1000000
tie_word_embeddings: false
use_cache: true
context_length: 2048
vocab_size: 178
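
This describes a small Pharia-style decoder: 6 hidden layers, 8 attention heads over a 512-dimensional hidden state (64-dimensional heads), full multi-head attention (num_key_value_heads equals num_attention_heads), GELU activations, biased attention and MLP projections, and RoPE with theta 1000000 and no scaling. Below is a minimal sketch of loading the file and deriving those facts; the filename `config.yaml` and the use of PyYAML are assumptions, not part of the original.

```python
# Minimal sketch: load the YAML config above and derive a few architecture facts.
# Assumptions: the config is saved as "config.yaml" and PyYAML is installed.
import yaml

with open("config.yaml") as f:
    cfg = yaml.safe_load(f)

assert cfg["type"] == "pharia"

# Per-head dimension: hidden_size split evenly across attention heads (512 / 8 = 64).
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]

# num_key_value_heads == num_attention_heads means plain multi-head attention
# rather than grouped-query attention.
uses_gqa = cfg["num_key_value_heads"] < cfg["num_attention_heads"]

print(f"layers={cfg['num_hidden_layers']}, head_dim={head_dim}, gqa={uses_gqa}")
print(f"context_length={cfg['context_length']}, rope_theta={cfg['rope_theta']}")
```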