Mine / config.json
{
  "architectures": [
    "CustomAI"
  ],
  "auto_map": {
    "AutoConfig": "config.CustomAIConfig",
    "AutoModelForCausalLM": "model.CustomAI"
  },
  "dropout": 0.2,
  "model_type": "CustomAI",
  "n_embd": 384,
  "n_head": 4,
  "n_layer": 4,
  "torch_dtype": "float32",
  "transformers_version": "4.36.0.dev0",
  "vocab_size": 1000
}
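
The "auto_map" entries tell Transformers where to find the custom classes inside this repository: "AutoConfig" resolves to CustomAIConfig in config.py and "AutoModelForCausalLM" resolves to CustomAI in model.py, both loaded with trust_remote_code=True. Below is a minimal sketch of what the referenced config class and the loading call could look like; the class body, the field defaults, and the repo id "bpatel644/Mine" are assumptions inferred from this config.json, not the repository's actual code.

# config.py -- hypothetical sketch of the class referenced by
# "AutoConfig": "config.CustomAIConfig"; field names and defaults are
# taken from config.json above, everything else is assumed.
from transformers import PretrainedConfig


class CustomAIConfig(PretrainedConfig):
    model_type = "CustomAI"  # must match "model_type" in config.json

    def __init__(
        self,
        vocab_size=1000,  # "vocab_size"
        n_embd=384,       # "n_embd"
        n_head=4,         # "n_head"
        n_layer=4,        # "n_layer"
        dropout=0.2,      # "dropout"
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.n_embd = n_embd
        self.n_head = n_head
        self.n_layer = n_layer
        self.dropout = dropout
        super().__init__(**kwargs)


# Loading from the Hub; "bpatel644/Mine" is an assumed repo id.
# trust_remote_code=True is required because the classes live in the repo
# (config.py / model.py) rather than in the transformers library itself.
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("bpatel644/Mine", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("bpatel644/Mine", trust_remote_code=True)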