pico / config.json
{
"activation_hidden_dim": 768,
"architectures": [
"Pico"
],
"attention_n_heads": 12,
"attention_n_kv_heads": 4,
"auto_map": {
"AutoConfig": "model.PicoConfig",
"AutoModel": "model.Pico"
},
"batch_size": 2,
"d_model": 192,
"max_seq_len": 2048,
"model_type": "pico",
"n_layers": 12,
"norm_eps": 1e-06,
"position_emb_theta": 10000.0,
"tokenizer_class": "allenai/OLMo-7B-0724-hf",
"torch_dtype": "float32",
"transformers_version": "4.46.2",
"vocab_size": 50304
}
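
Below is a minimal sketch of how a checkpoint carrying this config might be loaded, assuming the repo id is rdiehlmartinez/pico (inferred from this page, not confirmed) and that the repo ships the model.py module referenced by auto_map. Because AutoConfig/AutoModel are routed to custom classes, trust_remote_code=True is required.

from transformers import AutoConfig, AutoModel, AutoTokenizer

REPO_ID = "rdiehlmartinez/pico"  # assumed repo id, inferred from this page

# auto_map points AutoConfig/AutoModel at model.PicoConfig and model.Pico,
# custom classes defined in the repo's model.py, so loading requires
# opting in to remote code execution.
config = AutoConfig.from_pretrained(REPO_ID, trust_remote_code=True)
model = AutoModel.from_pretrained(REPO_ID, trust_remote_code=True)

# tokenizer_class names the allenai/OLMo-7B-0724-hf repo rather than a
# tokenizer class, so the tokenizer can be fetched from that repo directly.
tokenizer = AutoTokenizer.from_pretrained("allenai/OLMo-7B-0724-hf")

# Quantities implied by the config values above:
# per-head dimension = d_model / attention_n_heads = 192 / 12 = 16,
# and 12 query heads share 4 KV heads, i.e. grouped-query attention
# with 3 query heads per KV head.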