Update app.py
app.py CHANGED
@@ -1,6 +1,6 @@
 from fastapi import FastAPI, HTTPException, Query
 from fastapi.responses import JSONResponse
-from webscout import WEBS, transcriber, LLM
+from webscout import WEBS, transcriber, LLM
 from typing import Optional, List, Dict, Union
 from fastapi.encoders import jsonable_encoder
 from bs4 import BeautifulSoup
@@ -151,20 +151,31 @@ async def llm_chat(
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"Error during LLM chat: {e}")
 
-
+def snova_ai(user, model="llama3-70b", system="Answer as concisely as possible."):
+    env_type = "tp16405b" if "405b" in model else "tp16"
+    data = {'body': {'messages': [{'role': 'system', 'content': system}, {'role': 'user', 'content': user}], 'stream': True, 'model': model}, 'env_type': env_type}
+    with requests.post('https://fast.snova.ai/api/completion', headers={'content-type': 'application/json'}, json=data, stream=True) as response:
+        output = ''
+        for line in response.iter_lines(decode_unicode=True):
+            if line.startswith('data:'):
+                try:
+                    data = json.loads(line[len('data: '):])
+                    output += data.get("choices", [{}])[0].get("delta", {}).get("content", '')
+                except json.JSONDecodeError:
+                    if line[len('data: '):] == '[DONE]':
+                        break
+    return output
 
 @app.get("/api/fastAI")
-async def
+async def fast_ai(user: str, model: str = "llama3-70b", system: str = "Answer as concisely as possible."):
     """Get a response from the Snova AI service."""
-    fast_ai.model = model
-    fast_ai.system = system
     try:
-
-        response = await asyncio.to_thread(fast_ai.generate_response, user)
+        response = await asyncio.to_thread(snova_ai, user, model, system)
         return JSONResponse(content={"response": response})
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"Error during Snova AI request: {e}")
 
+
 
 @app.get("/api/answers")
 async def answers(q: str, proxy: Optional[str] = None):