lavanjv committed on
Commit
32da5f6
1 Parent(s): 2c2ef07

Update model.py

Browse files
Files changed (1) hide show
  1. model.py +1 -1
model.py CHANGED
@@ -82,7 +82,7 @@ Only return the helpful answer below and nothing else.
82
  Helpful answer:
83
  """.format(query_with_history, message)
84
  # Generate text using the LLM model and the custom prompt
85
- max_generated_length = 99900 # Desired length of the generated text
86
  total_prefix_length = len(custom_prompt_template.split())
87
  max_length = total_prefix_length + max_generated_length
88
 
 
82
  Helpful answer:
83
  """.format(query_with_history, message)
84
  # Generate text using the LLM model and the custom prompt
85
+ max_generated_length = 7000 # Desired length of the generated text
86
  total_prefix_length = len(custom_prompt_template.split())
87
  max_length = total_prefix_length + max_generated_length
88