IliaLarchenko committed
Commit 1e66988
1 Parent(s): 437e3cd

Added max_tokens

Files changed (1): api/llm.py (+2 −1)
api/llm.py CHANGED
@@ -59,7 +59,7 @@ class LLMManager:
 
     def get_text(self, messages):
         try:
-            response = self.client.chat.completions.create(model=self.config.llm.name, messages=messages, temperature=1)
+            response = self.client.chat.completions.create(model=self.config.llm.name, messages=messages, temperature=1, max_tokens=2000)
             if not response.choices:
                 raise APIError("LLM Get Text Error", details="No choices in response")
             return response.choices[0].message.content.strip()
@@ -73,6 +73,7 @@ class LLMManager:
                 messages=messages,
                 temperature=1,
                 stream=True,
+                max_tokens=2000,
             )
         except Exception as e:
            raise APIError(f"LLM End Interview Error: Unexpected error: {e}")