File size: 271 Bytes
Commit: 0b7a312
{
  "_name_or_path": "ebowwa/bad_llm_dpov03",
  "architectures": [
    "LLaMAForCausalLM"
  ],
  "bos_token_id": 1,
  "eos_token_id": 2,
  "model_type": "llama",
  "torch_dtype": "float16",
  "transformers_version": "4.27.0",
  "vocab_size": 32000
}
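
For context, a minimal sketch of how a config like this is typically consumed with the transformers library, assuming the ebowwa/bad_llm_dpov03 repo is publicly available on the Hugging Face Hub. Note that AutoModel classes resolve the implementation from the "llama" model_type rather than the "LLaMAForCausalLM" architectures string, and any fields absent from this 271-byte file fall back to LlamaConfig defaults.

```python
# Minimal sketch: inspecting this config via AutoConfig.
# Assumes the repo is publicly accessible on the Hugging Face Hub;
# fields missing from config.json (e.g. hidden_size) are filled in
# with LlamaConfig defaults.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("ebowwa/bad_llm_dpov03")
print(config.model_type)   # "llama"
print(config.vocab_size)   # 32000
print(config.torch_dtype)  # torch.float16 (parsed from the "float16" string)
```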