update chat_with_mistral to choose the client: either the Groq or the Mistral API
excel_chat.py  +53 -125
@@ -4,144 +4,72 @@ from mistralai.models.chat_completion import ChatMessage
 import os
 import pandas as pd
 import numpy as np
-import
-[… 16 more removed lines; their content is not rendered in the diff view …]
+from groq import Groq
+
+def ask_llm(query, input, client_index):
+    messages = [
+        {
+            "role": "system",
+            "content": f"You are a helpful assistant. Only show your final response to the **User Query**! Do not provide any explanations or details: \n# User Query:\n{query}."
+        },
+        {
+            "role": "user",
+            "content": f"{input}",
+        }
+    ]
+
+    if client_index == 0:
+        client = Groq(api_key=userdata.get('GROQ_API_KEY'))
+        chat_completion = client.chat.completions.create(
+            messages=messages,
+            model='mixtral-8x7b-32768',
+        )
+    else:
+        client = MistralClient(api_key=userdata.get('MISTRAL_API_KEY'))
+        chat_completion = client.chat(
+            messages=messages,
+            model='mistral-small-latest',
+        )
+
+    return chat_completion.choices[0].message.content
+
+def filter_df(df, column_name, keywords):
+    if len(keywords)>0:
+        if column_name in df.columns:
+            contains_keyword = lambda x: any(keyword.lower() in (x.lower() if type(x)==str else '') for keyword in keywords)
+            filtered_df = df[df[column_name].apply(contains_keyword)]
+        else:
+            contains_keyword = lambda row: any(keyword.lower() in (str(cell).lower() if isinstance(cell, str) else '') for keyword in keywords for cell in row)
+            filtered_df = df[df.apply(contains_keyword, axis=1)]
+    else:
+        filtered_df = df
+    return filtered_df
 
+def chat_with_mistral(source_cols, dest_col, prompt, excel_file, url, search_col, keywords, client):
+    print(f'xlsxfile = {excel_file}')
     df = pd.read_excel(excel_file)
-    api_key = os.environ["MISTRAL_API_KEY"]
-    model = "mistral-small" # Use "Mistral-7B-v0.2" for "mistral-tiny"
-
-
-    client = MistralClient(api_key=api_key)
-
-    source_columns = source_cols#.split(", ") # Split input into multiple variables
     df[dest_col] = ""
     try:
        file_name = url.split("/")[-2] + ".xlsx"
     except:
        file_name = excel_file
+    print(f"Keywords: {keywords}")
 
-
-        filtered_df = df[df['File'] == tdoc_name]
-        if not filtered_df.empty:
-            concatenated_content = "\n\n".join(f"{column_name}: {filtered_df[column_name].iloc[0]}" for column_name in source_columns)
-            messages = [ChatMessage(role="user", content=f"Using the following content: {concatenated_content}"), ChatMessage(role="user", content=prompt)]
-            chat_response = client.chat(model=model, messages=messages)
-            filtered_df.loc[filtered_df.index[0], dest_col] = chat_response.choices[0].message.content
-            # Update the DataFrame with the modified row
-            df.update(filtered_df)
-            # Write the updated DataFrame to the Excel file
-            df.to_excel(file_name, index=False)
-            return file_name, df.head(5)
-        else:
-            return file_name, df.head(5)
-    else:
-        for index, row in df.iterrows():
-            concatenated_content = "\n\n".join(f"{column_name}: {row[column_name]}" for column_name in source_columns)
-            # Check if the concatenated content is not empty
-            print('test')
-            if not concatenated_content == "\n\n".join(f"{column_name}: nan" for column_name in source_columns):
-                print('c bon')
-                messages = [ChatMessage(role="user", content=f"Using the following content: {concatenated_content}"), ChatMessage(role="user", content=prompt)]
-                chat_response = client.chat(model=model, messages=messages)
-                df.at[index, dest_col] = chat_response.choices[0].message.content
+    filtred_df = filter_df(df, search_col, keywords)
 
-
-
+    for index, row in filtred_df.iterrows():
+        concatenated_content = "\n\n".join(f"{column_name}: {str(row[column_name])}" for column_name in source_cols)
+        llm_answer = ask_llm(prompt, concatenated_content, client)
+        print(f"QUERY:\n{prompt}\nCONTENT:\n{concatenated_content[:200]}...\n\nANSWER:\n{llm_answer}")
+        df.at[index, dest_col] = llm_answer
 
+    df.to_excel(file_name, index=False)
+    return file_name, df.head(5)
 
 def get_columns(file):
     if file is not None:
         df = pd.read_excel(file)
         columns = list(df.columns)
-        return gr.update(choices=columns), gr.update(choices=columns), gr.update(choices=columns), gr.update(choices=columns + [""]), df.head(5)
+        return gr.update(choices=columns), gr.update(choices=columns), gr.update(choices=columns), gr.update(choices=columns + [""]), gr.update(choices=columns + ['[ALL]']), df.head(5)
     else:
-        return gr.update(choices=[]), gr.update(choices=[]), gr.update(choices=[]), gr.update(choices=[]), pd.DataFrame()
+        return gr.update(choices=[]), gr.update(choices=[]), gr.update(choices=[]), gr.update(choices=[]), gr.update(choices=[]), pd.DataFrame()
-
-
-
-# Categories
-categories = [
-    {
-        "topic": "Confidentiality and Privacy Protection",
-        "description": "This topic covers the protection of confidentiality, privacy, and integrity in security systems. It also includes authentication and authorization processes.",
-        "experts": ["Mireille"]
-    },
-    {
-        "topic": "Distributed Trust and End-User Trust Models",
-        "description": "This topic focuses on distributed trust models and how end-users establish trust in secure systems.",
-        "experts": ["Mireille", "Khawla"]
-    },
-    {
-        "topic": "Secure Element and Key Provisioning",
-        "description": "This topic involves the secure element in systems and the process of key provisioning.",
-        "experts": ["Mireille"]
-    },
-    {
-        "topic": "Residential Gateway Security",
-        "description": "This topic covers the security aspects of Residential Gateways.",
-        "experts": ["Mireille"]
-    },
-    {
-        "topic": "Standalone Non-Public Network (SNPN) Inter-Connection and Cybersecurity",
-        "description": "This topic focuses on the inter-connection of Standalone Non-Public Networks and related cyber-security topics.",
-        "experts": ["Khawla"]
-    },
-    {
-        "topic": "Distributed Ledger and Blockchain in SNPN",
-        "description": "This topic covers the use of distributed ledger technology and blockchain in securing Standalone Non-Public Networks.",
-        "experts": ["Khawla"]
-    },
-    {
-        "topic": "Distributed Networks and Communication",
-        "description": "This topic involves distributed networks such as mesh networks, ad-hoc networks, and multi-hop networks, and their cyber-security aspects.",
-        "experts": ["Guillaume"]
-    },
-    {
-        "topic": "Swarm of Drones and Unmanned Aerial Vehicles Network Infrastructure",
-        "description": "This topic covers the network infrastructure deployed by Swarm of Drones and Unmanned Aerial Vehicles.",
-        "experts": ["Guillaume"]
-    },
-    {
-        "topic": "USIM and Over-the-Air Services",
-        "description": "This topic involves USIM and related over-the-air services such as Steering of Roaming, roaming services, network selection, and UE configuration.",
-        "experts": ["Vincent"]
-    },
-    {
-        "topic": "Eco-Design and Societal Impact of Technology",
-        "description": "This topic covers eco-design concepts, including energy saving, energy efficiency, carbon emissions, and the societal impact of technology.",
-        "experts": ["Pierre"]
-    },
-    {
-        "topic": "Service Requirements of New Services",
-        "description": "This topic involves defining service requirements for new services, detecting low signals of new trends and technologies, and assessing their impact on USIM services or over-the-air services.",
-        "experts": ["Ly-Thanh"]
-    },
-    {
-        "topic": "Satellite and Non Terrestrial Networks",
-        "description": "This topic covers satellite networks, Non Terrestrial Networks, Private Networks, IoT, Inter Satellite communication, and Radio Access Network.",
-        "experts": ["Nicolas"]
-    },
-    {
-        "topic": "Public Safety and Emergency Communication",
-        "description": "This topic involves Public Safety Communication, Military Communication, Emergency Calls, Emergency Services, Disaster Communication Access, and other related areas.",
-        "experts": ["Dorin"]
-    }
-]
-
-df_cate = pd.DataFrame(categories)
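
The new ask_llm reads its API keys through userdata.get, which looks like the Google Colab secrets helper; on a Space the keys would more typically come from environment variables. A minimal sketch of that variant is below; make_client and the exact variable names GROQ_API_KEY / MISTRAL_API_KEY are assumptions, not part of the commit.

import os
from groq import Groq
from mistralai.client import MistralClient

def make_client(client_index):
    # Follows the convention used by ask_llm: index 0 selects Groq,
    # anything else selects the Mistral API. Keys are read from the
    # environment instead of Colab-style userdata.get() (assumption).
    if client_index == 0:
        return Groq(api_key=os.environ["GROQ_API_KEY"])
    return MistralClient(api_key=os.environ["MISTRAL_API_KEY"])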
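
filter_df keeps a row when any keyword appears in the chosen column, falls back to searching every cell when that column is missing, and returns the DataFrame unchanged when no keywords are given. A small hypothetical check of that behaviour follows; the sample DataFrame and column names are invented, and it assumes the module is importable as excel_chat.

import pandas as pd
from excel_chat import filter_df  # assumption: excel_chat.py is on the import path

df = pd.DataFrame({
    "File": ["S3-240001.docx", "S3-240002.docx"],
    "Title": ["Key provisioning for SNPN", "Residential gateway security"],
})

print(filter_df(df, "Title", ["snpn"]))       # keyword matched in the named column -> first row kept
print(filter_df(df, "Summary", ["gateway"]))  # column missing -> every cell searched -> second row kept
print(filter_df(df, "Title", []))             # no keywords -> DataFrame returned unchanged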