text_to_sql / generation_config.json
{
  "_from_model_config": true,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "pad_token_id": 0,
  "transformers_version": "4.35.2",
  "max_length": 512,
  "temperature": 0.7,
  "top_p": 0.9,
  "top_k": 50,
  "do_sample": true,
  "repetition_penalty": 1.2,
  "num_return_sequences": 1
}

Parameter notes (kept outside the file, since generation_config.json must be strict JSON and // comments would make it unparseable):

- max_length: caps the total length of the generated sequence at 512 tokens
- temperature: scales the logits before sampling; higher values give more random output
- top_p: nucleus sampling; samples only from the smallest set of tokens whose cumulative probability reaches 0.9
- top_k: restricts sampling to the 50 highest-probability tokens
- do_sample: enables sampling instead of greedy decoding
- repetition_penalty: penalizes tokens that have already been generated, discouraging repetition
- num_return_sequences: number of output sequences returned per prompt
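
A minimal usage sketch, assuming the checkpoint lives at "shaheerzk/text_to_sql" (the repo id is inferred from this page and may differ) and is a causal language model. transformers picks up generation_config.json automatically when the model is loaded, but the config can also be loaded and overridden explicitly:

from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

model_id = "shaheerzk/text_to_sql"  # assumed repo id; adjust to the actual checkpoint
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

# Load the generation config shipped with the model and override a field if needed.
gen_config = GenerationConfig.from_pretrained(model_id)
gen_config.max_length = 256  # example override

prompt = "Translate to SQL: list all customers who placed an order in 2023"
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, generation_config=gen_config)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

Passing generation_config explicitly keeps the call reproducible even if the file on the Hub changes; omitting it falls back to the values above.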