gpt2-20b / config.json
{
"activation_function": "gelu_new",
"architectures": [
"GPT2LMHeadModel"
],
"attn_pdrop": 0.1,
"embd_pdrop": 0.1,
"bos_token_id": 0,
"eos_token_id": 0,
"initializer_range": 0.02,
"layer_norm_epsilon": 1e-05,
"model_type": "gpt2",
"n_embd": 6144,
"n_head": 64,
"n_layer": 44,
"resid_pdrop": 0.1,
"vocab_size": 50432
}
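
For reference, a minimal sketch of how this config could be loaded and sanity-checked with the transformers library, assuming the JSON above is saved locally as config.json (the hosting repo id is not shown on this page). The parameter estimate uses the standard GPT-2 approximation of 12 * n_layer * n_embd^2 for the transformer blocks plus vocab_size * n_embd for the tied token embedding, which lands near the 20B implied by the model name.

from transformers import GPT2Config

# Load the config shown above; from_json_file reads a local file, not the Hub.
config = GPT2Config.from_json_file("config.json")

# Per-head dimension: 6144 / 64 = 96.
head_dim = config.n_embd // config.n_head

# Rough parameter count: 12 * n_layer * n_embd^2 covers the attention and MLP
# weights per block; vocab_size * n_embd covers the token embedding. Biases,
# LayerNorms, and position embeddings are ignored as negligible.
block_params = 12 * config.n_layer * config.n_embd ** 2  # ~19.9B
embed_params = config.vocab_size * config.n_embd         # ~0.31B

print(f"head dim: {head_dim}")
print(f"approx. params: {(block_params + embed_params) / 1e9:.1f}B")  # ~20.2B

Instantiating GPT2LMHeadModel(config) would allocate the full ~20B-parameter model in memory, so the sketch only inspects the configuration.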