Kathirsci committed on
Commit
55c778e
·
verified ·
1 Parent(s): 94feff0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -2
app.py CHANGED
@@ -25,8 +25,14 @@ QA_CHAIN_PROMPT = PromptTemplate(input_variables=["context", "question"],templat
25
def predict(message, history):
    """Generate a reply to *message*, using *history* as the prompt context.

    Parameters:
        message: the user's current question.
        history: prior conversation turns, interpolated into the prompt as
            ``context`` (assumed to be whatever the chat UI passes — verify
            against the caller).

    Returns:
        The model's generated text, or an apology string when the model
        produced no generations.
    """
    input_prompt = QA_CHAIN_PROMPT.format(question=message, context=history)
    result = llm.generate([input_prompt])
    print(result)  # debug: inspect the raw LLMResult

    # LLMResult is not subscriptable: result[0] raises TypeError. The text
    # lives in result.generations, a list (one entry per prompt) of lists
    # (one entry per candidate) — guard both levels before indexing.
    if result.generations and result.generations[0]:
        ai_msg = result.generations[0][0].text
    else:
        ai_msg = "I'm sorry, I couldn't generate a response for that input."

    return ai_msg
31
 
32
 
 
25
def predict(message, history):
    """Generate a reply to *message*, using *history* as the prompt context.

    Parameters:
        message: the user's current question.
        history: prior conversation turns, interpolated into the prompt as
            ``context`` (assumed to be whatever the chat UI passes — verify
            against the caller).

    Returns:
        The model's generated text, or an apology string when the model
        produced no generations.
    """
    input_prompt = QA_CHAIN_PROMPT.format(question=message, context=history)
    result = llm.generate([input_prompt])
    print(result)  # Print the result for inspection

    # result.generations is a list (one entry per prompt) of lists (one
    # entry per candidate). Checking only the outer list still allows an
    # IndexError when the inner list is empty — guard both levels.
    if result.generations and result.generations[0]:
        ai_msg = result.generations[0][0].text
    else:
        ai_msg = "I'm sorry, I couldn't generate a response for that input."

    return ai_msg
37
 
38