llama2-docker-compose/data/models--meta-llama--Llama-2-7b-chat-hf/snapshots/f5db02db724555f92da89c216ac04704f23d4590/generation_config.json
{
  "bos_token_id": 1,
  "do_sample": true,
  "eos_token_id": 2,
  "max_length": 4096,
  "pad_token_id": 0,
  "temperature": 0.6,
  "top_p": 0.9,
  "transformers_version": "4.32.0.dev0"
}
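
For reference, these are the sampling defaults that transformers picks up from this snapshot when generating with the model. A minimal sketch of how the file is consumed, assuming transformers is installed, the gated meta-llama/Llama-2-7b-chat-hf repo is accessible (or already cached under ./data as shown in the path above), and that ./data is used as the Hub cache directory:

# Sketch: load the generation_config.json shown above and use it for sampling.
# Assumes the ./data directory from this repo is the Hugging Face cache dir.
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

model_id = "meta-llama/Llama-2-7b-chat-hf"
cache_dir = "./data"  # cache layout matches models--meta-llama--... above

# Resolves to the snapshot's generation_config.json:
# do_sample=True, temperature=0.6, top_p=0.9, max_length=4096, ...
gen_config = GenerationConfig.from_pretrained(model_id, cache_dir=cache_dir)
print(gen_config)

tokenizer = AutoTokenizer.from_pretrained(model_id, cache_dir=cache_dir)
model = AutoModelForCausalLM.from_pretrained(model_id, cache_dir=cache_dir)

inputs = tokenizer("Hello, how are you?", return_tensors="pt")
# generate() would fall back to these defaults anyway; passing the config
# explicitly just makes the source of the sampling parameters visible.
outputs = model.generate(**inputs, generation_config=gen_config)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))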