Mehmet Emin Aydin committed
changed api provider
app.py
CHANGED
@@ -14,11 +14,13 @@ import pickle
 from datetime import datetime
 import io
 from dotenv import load_dotenv
-from groq import Groq
+# from groq import Groq
+from together import Together
 load_dotenv()
 log_data = []
 
-client = Groq(api_key=os.getenv("GROQ_API_KEY"))
+# client = Groq(api_key=os.getenv("GROQ_API_KEY"))
+client = Together(api_key=os.getenv("TOGETHER_API_KEY"))
 
 class User:
     def __init__(self, username):
@@ -88,8 +90,8 @@ def ask_question(user: User, question: str, vector_store : FAISS) -> tuple[str,
     _log(user, question, retrieved_chunks, response)
     return answer, 200
 
-
-def get_completion(prompt, model="llama3-8b-8192"):
+# Function to get completion with batching
+def get_completion(prompt, model="meta-llama/Llama-3.3-70B-Instruct-Turbo-Free"):
     messages = [
         {
             "role": "system",
@@ -109,6 +111,7 @@ def get_completion(prompt, model="llama3-8b-8192"):
     return response.choices[0].message.content.strip()
 
 
+
 def _log(user: User, question: str, retrieved_chunks: str, answer: str):
     timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
     log_message = (
|