Update excel_chat.py
excel_chat.py (CHANGED: +9 -9)
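This commit threads a per-user record through ask_llm: each provider's API key is now read from an environment variable whose name is stored under user['api_keys'], rather than from a hard-coded variable, and the call in chat_with_mistral passes the user along. The commented-out user list is also filled in.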
@@ -8,9 +8,9 @@ from groq import Groq
 import anthropic
 from users_management import update_json, users
 
-#users = ['maksG', '
+#users = ['maksG', 'AlmaA', 'YchK']
 
-def ask_llm(query, input, client_index):
+def ask_llm(query, input, client_index, user):
     messages = [
         {
             "role": "system",
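The new user argument only needs to expose user['api_keys'], and each entry there holds the name of an environment variable rather than the key itself, since the value is still fetched through os.environ. A minimal sketch of a record that satisfies the lookups in this diff; every field except the api_keys shape and its groq/mistral/claude entries is hypothetical:

    import os

    # Hypothetical user record; only the api_keys shape is implied by the diff.
    # Each entry names an environment variable, not the secret itself.
    user = {
        "name": "maksG",
        "api_keys": {
            "groq": "GROQ_API_KEY_MAKSG",
            "mistral": "MISTRAL_API_KEY_MAKSG",
            "claude": "ANTHROPIC_API_KEY_MAKSG",
        },
    }

    # Same lookup pattern as the updated ask_llm; .get avoids a KeyError
    # when the variable is unset in this standalone sketch.
    groq_key = os.environ.get(user["api_keys"]["groq"])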
@@ -35,31 +35,31 @@ def ask_llm(query, input, client_index):
     ]
 
     if client_index == "Groq":
-        client = Groq(api_key=os.environ[
+        client = Groq(api_key=os.environ[user['api_keys']['groq']])
         chat_completion = client.chat.completions.create(
             messages=messages,
             model='mixtral-8x7b-32768',
         )
     elif client_index == "Mistral Small":
-        client = MistralClient(api_key=os.environ['
+        client = MistralClient(api_key=os.environ[user['api_keys']['mistral']])
         chat_completion = client.chat(
             messages=messages,
             model='mistral-small-latest',
         )
     elif client_index == "Mistral Tiny":
-        client = MistralClient(api_key=os.environ['
+        client = MistralClient(api_key=os.environ[user['api_keys']['mistral']])
         chat_completion = client.chat(
             messages=messages,
             model='mistral-tiny',
         )
     elif client_index == "Mistral Medium":
-        client = MistralClient(api_key=os.environ['
+        client = MistralClient(api_key=os.environ[user['api_keys']['mistral']])
         chat_completion = client.chat(
             messages=messages,
             model='mistral-medium',
         )
     elif client_index == "Claude Opus":
-        client = anthropic.Anthropic(api_key=os.environ['
+        client = anthropic.Anthropic(api_key=os.environ[user['api_keys']['claude']])
         chat_completion = client.messages.create(
             model="claude-3-opus-20240229",
             max_tokens=350,
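The Groq branch and the three Mistral branches differ only in provider and model string. One possible consolidation, not part of this commit, is a lookup table keyed by the UI label; MistralClient below is the pre-1.0 mistralai client the file already uses:

    import os
    from groq import Groq
    from mistralai.client import MistralClient

    # Provider and model per label; values taken verbatim from the diff.
    MODELS = {
        "Groq":           ("groq",    "mixtral-8x7b-32768"),
        "Mistral Small":  ("mistral", "mistral-small-latest"),
        "Mistral Tiny":   ("mistral", "mistral-tiny"),
        "Mistral Medium": ("mistral", "mistral-medium"),
    }

    def complete(client_index, messages, user):
        provider, model = MODELS[client_index]
        api_key = os.environ[user["api_keys"][provider]]
        if provider == "groq":
            # Groq exposes an OpenAI-style chat.completions endpoint.
            return Groq(api_key=api_key).chat.completions.create(
                messages=messages, model=model)
        return MistralClient(api_key=api_key).chat(messages=messages, model=model)

With this shape, adding a model becomes a one-line table entry instead of a new elif branch with its own key lookup.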
@@ -69,7 +69,7 @@ def ask_llm(query, input, client_index):
         ).content[0].text
         return chat_completion
     else:
-        client = anthropic.Anthropic(api_key=os.environ['
+        client = anthropic.Anthropic(api_key=os.environ[user['api_keys']['claude']])
         chat_completion = client.messages.create(
             model="claude-3-sonnet-20240229",
             max_tokens=350,
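Both Claude branches unwrap the reply as .content[0].text. A sketch of that call shape with the anthropic SDK; note that the Messages API takes the system prompt as a separate system parameter, so a leading "system" role entry, like the one this file builds into messages, has to be split out first:

    import os
    import anthropic

    def claude_reply(messages, env_var, model="claude-3-sonnet-20240229"):
        # The Messages API rejects a "system" role inside messages,
        # so peel a leading system entry off into the system parameter.
        system = ""
        if messages and messages[0]["role"] == "system":
            system = messages[0]["content"]
            messages = messages[1:]
        client = anthropic.Anthropic(api_key=os.environ[env_var])
        reply = client.messages.create(
            model=model,
            max_tokens=350,
            system=system,
            messages=messages,
        )
        return reply.content[0].text  # same unwrapping as the diff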
@@ -111,7 +111,7 @@ def chat_with_mistral(source_cols, dest_col, prompt, excel_file, url, search_col
     print('test')
     if not concatenated_content == "\n\n".join(f"{column_name}: nan" for column_name in source_cols):
         print('c bon')
-        llm_answer = ask_llm(prompt[0], concatenated_content, client)
+        llm_answer = ask_llm(prompt[0], concatenated_content, client, user)
         print(f"QUERY:\n{prompt[0]}\nCONTENT:\n{concatenated_content[:200]}...\n\nANSWER:\n{llm_answer}")
         df.at[index, dest_col] = llm_answer
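At this call site, the third argument ask_llm dispatches on appears to be the model label (the local variable is named client, but it is compared against strings such as "Mistral Small"). A hedged usage sketch; the user record and content are illustrative:

    from excel_chat import ask_llm  # the module this commit edits

    # Illustrative inputs; the named environment variable must be set.
    user = {"api_keys": {"mistral": "MISTRAL_API_KEY"}}
    content = "Name: ACME Corp\n\nNotes: met at the trade show"

    answer = ask_llm("Summarise this contact in one sentence.", content,
                     "Mistral Small", user)
    print(answer)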