gorkemgoknar committed
Commit b20afa0
1 Parent(s): 5d4cdcd

Update app.py


fix llama output

Files changed (1)
  1. app.py +2 -2
app.py CHANGED

@@ -88,8 +88,8 @@ def get_audio_url(text,character):
 def get_response_cpp(prompt):
 
     output = llm(prompt, max_tokens=32, stop=["#","sierpeda"], echo=True)
-    print(output)
-    response_Text= output["choices"][0]["text"]
+    #print(output)
+    response_text= output["choices"][0]["text"]
 
     return response_text
 
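
For context, the change works because llama-cpp-python's llm(...) call returns a completion dict whose generated text sits under choices[0]["text"]; the old code assigned it to response_Text but returned response_text, which would raise a NameError at runtime. A minimal standalone sketch of the corrected function is below; the import, Llama constructor, and model path are assumptions for illustration and are not part of this commit.

from llama_cpp import Llama

# Hypothetical model setup; app.py defines its own global `llm` elsewhere.
llm = Llama(model_path="models/llama-model.gguf")

def get_response_cpp(prompt):
    # llm(...) returns a completion dict: {"choices": [{"text": ...}, ...], ...}
    output = llm(prompt, max_tokens=32, stop=["#", "sierpeda"], echo=True)
    # The commit renames the variable so the assigned name matches the one returned.
    response_text = output["choices"][0]["text"]
    return response_text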