imperialwool committed on
Commit 84b88a1
1 Parent(s): 3b4c94a

Update gradio_app.py

Files changed (1)
  1. gradio_app.py +10 -4
gradio_app.py CHANGED
@@ -60,15 +60,21 @@ def generate_answer(request: str, max_tokens: int = 256, language: str = "en", c
     try:
         maxTokens = max_tokens if 16 <= max_tokens <= 256 else 64
         if isinstance(custom_prompt, str):
-            userPrompt = custom_prompt + "\n\nUser: " + request + "\nAssistant: "
+            userPrompt = custom_prompt + "\n\nUSER: " + request + "\nASSISTANT: "
         else:
-            userPrompt = prompt + "\n\nUser: " + request + "\nAssistant: "
+            userPrompt = prompt + "\n\nUSER: " + request + "\nASSISTANT: "
+        logs += f"\nFinal prompt: {userPrompt}\n"
     except:
         return "Not enough data! Check that you passed all needed data.", logs
 
     try:
-        output = llm(userPrompt, max_tokens=maxTokens, stop=["User:"], echo=False)
-        text = output["choices"][0]["text"]
+        # Temporary workaround until the cause of the occasional empty output is found
+        while True:
+            output = llm(userPrompt, max_tokens=maxTokens, stop=["User:"], echo=False)
+            text = output["choices"][0]["text"]
+            if len(text.strip()) > 1 and text.strip() not in ['', None, ' ']:
+                break
+
         if language in languages:
             logs += f"\nTranslating from en to {language}"
             encoded_input = translator_tokenizer(text, return_tensors="pt")
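
The retry loop added in this commit re-calls the model until it returns a non-empty completion, with no upper bound on attempts. A minimal sketch of the same workaround with a bounded number of retries is below. It assumes llm is the llama-cpp-python Llama instance this app already uses (the call signature and the output["choices"][0]["text"] access match that API); MAX_ATTEMPTS and generate_text are illustrative names and are not part of this commit.

# Sketch only: bounded retry around the completion call, assuming a llama-cpp-python Llama object.
from llama_cpp import Llama

MAX_ATTEMPTS = 5  # illustrative cap, not in the commit

def generate_text(llm: Llama, user_prompt: str, max_tokens: int) -> str:
    """Return the first non-empty completion, giving up after MAX_ATTEMPTS tries."""
    text = ""
    for _ in range(MAX_ATTEMPTS):
        output = llm(user_prompt, max_tokens=max_tokens, stop=["User:"], echo=False)
        text = output["choices"][0]["text"]
        if text.strip():  # non-empty answer: stop retrying
            return text
    return text  # may still be empty if every attempt came back blank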