Update app.py
app.py CHANGED
@@ -98,15 +98,15 @@ async def get_answer_llama(request: Request ):
     text = data['text']
     print("recived ",text)

-    try:
-        model = data['model']
-        if model == '13b':
-            print('using 13b')
-            res= do_ML_LLAMA13b(text,0)
-            dict={"LLAMA":res}
-            return JSONResponse(dict)
-    except:
-        print("Using 7b")
+    # try:
+    #     model = data['model']
+    #     if model == '13b':
+    #         print('using 13b')
+    #         res= do_ML_LLAMA13b(text,0)
+    #         dict={"LLAMA":res}
+    #         return JSONResponse(dict)
+    # except:
+    #     print("Using 7b")


     res= do_ML_LLAMA13b(text,0)
@@ -246,15 +246,15 @@ def do_ML_LLAMA13b(text:str, trycount:int):

         return "Requested Could not be proceed"

-
-
-
-
-
-
-
-
-
+    try:
+        queue_element = driver.find_element(By.CLASS_NAME,'progress-text')
+        queue_text = queue_element.text
+        print(queue_text)
+        if "queue" in queue_text:
+            print("Queue is present")
+            return do_ML_LLAMA7b(text, trycount)
+    except:
+        pass

     # value=""
     try:
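For context, here is a minimal sketch of how the endpoint reads after the first hunk. It assumes a FastAPI app, a POST route, and the request body being parsed into data; the route path, the await request.json() call, and the do_ML_LLAMA13b stub are assumptions, since the diff only shows the handler body.

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse

app = FastAPI()

def do_ML_LLAMA13b(text: str, trycount: int) -> str:
    # Stand-in for the Selenium-backed helper defined further down in app.py.
    return "stub response"

@app.post("/llama")  # hypothetical route; the decorator is not shown in the diff
async def get_answer_llama(request: Request):
    data = await request.json()  # assumed source of `data`
    text = data['text']
    print("recived ", text)
    # The '13b' vs '7b' model switch is commented out by this commit,
    # so every request now goes straight to the 13b helper.
    res = do_ML_LLAMA13b(text, 0)
    return JSONResponse({"LLAMA": res})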
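The second hunk adds a Selenium check against the hosted demo page: if the element with class progress-text mentions a queue, the request is handed off to do_ML_LLAMA7b. Below is a self-contained sketch of that pattern; the fallback_if_queued wrapper and the do_ML_LLAMA7b stub are illustrative names, the driver is assumed to already be attached to the target page, and the commit's bare except: is narrowed to NoSuchElementException here.

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException

def do_ML_LLAMA7b(text: str, trycount: int) -> str:
    # Stand-in for the repo's 7b helper that the commit falls back to.
    return "stub response"

def fallback_if_queued(driver: webdriver.Chrome, text: str, trycount: int):
    # Return the 7b result when the page reports a queue, otherwise None.
    try:
        # 'progress-text' is the class name targeted by the commit.
        queue_element = driver.find_element(By.CLASS_NAME, 'progress-text')
        queue_text = queue_element.text
        print(queue_text)
        if "queue" in queue_text:
            print("Queue is present")
            return do_ML_LLAMA7b(text, trycount)
    except NoSuchElementException:
        # No progress indicator on the page: stay on the 13b flow.
        pass
    return None

Narrowing the exception keeps a missing element from silently masking other WebDriver errors, which the bare except: in the commit would swallow.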
|