Spaces: Running on Zero
cutechicken committed
Commit 2317674 · 1 Parent(s): ebbebcc
Create app.py

app.py ADDED
@@ -0,0 +1,315 @@
import os
from dotenv import load_dotenv
import gradio as gr
from huggingface_hub import InferenceClient
import pandas as pd
from typing import List, Tuple
import json
from datetime import datetime

# Environment variable setup (load a local .env file if present, then read HF_TOKEN)
load_dotenv()
HF_TOKEN = os.getenv("HF_TOKEN")

# LLM Models Definition
LLM_MODELS = {
    "Cohere c4ai-crp-08-2024": "CohereForAI/c4ai-command-r-plus-08-2024",  # Default
    "Meta Llama3.3-70B": "meta-llama/Llama-3.3-70B-Instruct"  # Backup model
}

class ChatHistory:
    def __init__(self):
        self.history = []
        self.history_file = "/tmp/chat_history.json"
        self.load_history()

    def add_conversation(self, user_msg: str, assistant_msg: str):
        conversation = {
            "timestamp": datetime.now().isoformat(),
            "messages": [
                {"role": "user", "content": user_msg},
                {"role": "assistant", "content": assistant_msg}
            ]
        }
        self.history.append(conversation)
        self.save_history()

    def format_for_display(self):
        # Convert to the format expected by the Gradio Chatbot component
        formatted = []
        for conv in self.history:
            formatted.append([
                conv["messages"][0]["content"],  # user message
                conv["messages"][1]["content"]   # assistant message
            ])
        return formatted

    def get_messages_for_api(self):
        # Message format for API calls
        messages = []
        for conv in self.history:
            messages.extend([
                {"role": "user", "content": conv["messages"][0]["content"]},
                {"role": "assistant", "content": conv["messages"][1]["content"]}
            ])
        return messages

    def clear_history(self):
        self.history = []
        self.save_history()

    def save_history(self):
        try:
            with open(self.history_file, 'w', encoding='utf-8') as f:
                json.dump(self.history, f, ensure_ascii=False, indent=2)
        except Exception as e:
            print(f"Failed to save history: {e}")

    def load_history(self):
        try:
            if os.path.exists(self.history_file):
                with open(self.history_file, 'r', encoding='utf-8') as f:
                    self.history = json.load(f)
        except Exception as e:
            print(f"Failed to load history: {e}")
            self.history = []

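# Illustrative usage sketch of ChatHistory (not called by the app itself):
#   h = ChatHistory()
#   h.add_conversation("hi", "hello!")
#   h.format_for_display()    # -> [["hi", "hello!"]]
#   h.get_messages_for_api()  # -> [{"role": "user", ...}, {"role": "assistant", ...}]
# Note that /tmp on a Hugging Face Space is ephemeral, and the single global
# instance below is shared by every visitor, so persistence is best-effort.
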
# Create the global ChatHistory instance
chat_history = ChatHistory()

def get_client(model_name="Cohere c4ai-crp-08-2024"):
    try:
        return InferenceClient(LLM_MODELS[model_name], token=HF_TOKEN)
    except Exception:
        return InferenceClient(LLM_MODELS["Meta Llama3.3-70B"], token=HF_TOKEN)

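# Constructing an InferenceClient does not contact the API, so the fallback above
# mainly catches errors such as an unknown model_name key; problems with the model
# itself surface later, when chat_completion() is actually called.
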
def analyze_file_content(content, file_type):
    """Analyze file content and return a structural summary"""
    if file_type in ['parquet', 'csv']:
        try:
            lines = content.split('\n')
            header = lines[0]
            columns = header.count('|') - 1
            rows = len(lines) - 3
            return f"📊 Dataset structure: {columns} columns, {rows} rows"
        except Exception:
            return "❌ Failed to analyze dataset structure"

    lines = content.split('\n')
    total_lines = len(lines)
    non_empty_lines = len([line for line in lines if line.strip()])

    if any(keyword in content.lower() for keyword in ['def ', 'class ', 'import ', 'function']):
        functions = len([line for line in lines if 'def ' in line])
        classes = len([line for line in lines if 'class ' in line])
        imports = len([line for line in lines if 'import ' in line or 'from ' in line])
        return f"💻 Code structure: {total_lines} lines (functions: {functions}, classes: {classes}, imports: {imports})"

    paragraphs = content.count('\n\n') + 1
    words = len(content.split())
    return f"📝 Document structure: {total_lines} lines, {paragraphs} paragraphs, ~{words} words"

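# The pipe-count heuristic above assumes `content` is the markdown table emitted
# by DataFrame.to_markdown(): each row carries one more '|' than there are
# columns, and the first lines are the header and separator rows, hence the
# rough "- 1" and "- 3" adjustments.
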
def read_uploaded_file(file):
    if file is None:
        return "", ""
    try:
        file_ext = os.path.splitext(file.name)[1].lower()

        if file_ext == '.parquet':
            df = pd.read_parquet(file.name, engine='pyarrow')
            content = df.head(10).to_markdown(index=False)
            return content, "parquet"
        elif file_ext == '.csv':
            encodings = ['utf-8', 'cp949', 'euc-kr', 'latin1']
            for encoding in encodings:
                try:
                    df = pd.read_csv(file.name, encoding=encoding)
                    content = f"📊 Data preview:\n{df.head(10).to_markdown(index=False)}\n\n"
                    content += f"\n📈 Data info:\n"
                    content += f"- Total rows: {len(df)}\n"
                    content += f"- Total columns: {len(df.columns)}\n"
                    content += f"- Columns: {', '.join(df.columns)}\n"
                    content += f"\n📋 Column data types:\n"
                    for col, dtype in df.dtypes.items():
                        content += f"- {col}: {dtype}\n"
                    null_counts = df.isnull().sum()
                    if null_counts.any():
                        content += f"\n⚠️ Missing values:\n"
                        for col, null_count in null_counts[null_counts > 0].items():
                            content += f"- {col}: {null_count} missing\n"
                    return content, "csv"
                except UnicodeDecodeError:
                    continue
            # UnicodeDecodeError cannot be raised with just a message string
            # (it requires five positional arguments), so raise ValueError instead
            raise ValueError(f"❌ Could not read the file with any supported encoding ({', '.join(encodings)})")
        else:
            encodings = ['utf-8', 'cp949', 'euc-kr', 'latin1']
            for encoding in encodings:
                try:
                    with open(file.name, 'r', encoding=encoding) as f:
                        content = f.read()
                    return content, "text"
                except UnicodeDecodeError:
                    continue
            raise ValueError(f"❌ Could not read the file with any supported encoding ({', '.join(encodings)})")
    except Exception as e:
        return f"❌ File read error: {str(e)}", "error"

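# cp949 and euc-kr are attempted because Korean CSV/text exports commonly use
# those encodings; latin1 comes last because it accepts any byte sequence, so it
# always "succeeds" even when the decoded text is garbage.
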
def chat(message, history, uploaded_file, system_message="", max_tokens=4000, temperature=0.7, top_p=0.9):
    if not message:
        # chat() is a generator, so results must be yielded, not returned
        yield "", history
        return

    system_prefix = """I am 'GiniGEN', your friendly and knowledgeable AI assistant. I communicate according to the following principles:
1. 🤝 Converse in a friendly and empathetic manner
2. 💡 Provide clear, easy-to-understand explanations
3. 🎯 Grasp the intent of each question precisely and answer accordingly
4. 📚 Refer to uploaded file contents when needed to give concrete help
5. ✨ Add further insights and suggestions to make the conversation valuable

I always respond politely and kindly, adding concrete examples or explanations where needed
to aid understanding."""

    try:
        # Handle an uploaded file
        if uploaded_file:
            content, file_type = read_uploaded_file(uploaded_file)
            if file_type == "error":
                error_message = content
                chat_history.add_conversation(message, error_message)
                yield "", history + [[message, error_message]]
                return

            file_summary = analyze_file_content(content, file_type)

            if file_type in ['parquet', 'csv']:
                system_message += f"\n\nFile contents:\n```markdown\n{content}\n```"
            else:
                system_message += f"\n\nFile contents:\n```\n{content}\n```"

            if message == "Starting file analysis...":
                message = f"""[File Structure Analysis] {file_summary}
I will help from the following perspectives:
1. 📋 Overall content overview
2. 💡 Explanation of key features
3. 🎯 Practical ways to use it
4. ✨ Suggestions for improvement
5. 💬 Follow-up questions or further explanation"""

        # Build the message list
        messages = [{"role": "system", "content": system_prefix + system_message}]

        # Append previous conversation history
        if history:
            for user_msg, assistant_msg in history:
                messages.append({"role": "user", "content": user_msg})
                messages.append({"role": "assistant", "content": assistant_msg})

        messages.append({"role": "user", "content": message})

        # Call the API and stream the response
        client = get_client()
        partial_message = ""

        for msg in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            # delta is an object, not a dict, so read the .content attribute
            token = msg.choices[0].delta.content
            if token:
                partial_message += token
                current_history = history + [[message, partial_message]]
                yield "", current_history

        # Save the completed conversation
        chat_history.add_conversation(message, partial_message)

    except Exception as e:
        error_msg = f"❌ An error occurred: {str(e)}"
        chat_history.add_conversation(message, error_msg)
        yield "", history + [[message, error_msg]]

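# Because chat() is a generator, Gradio re-renders the Chatbot on every yield,
# which is what produces the token-by-token streaming effect in the UI.
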
with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN 🤖") as demo:
    # Load any existing history
    initial_history = chat_history.format_for_display()
    with gr.Row():
        with gr.Column(scale=2):
            chatbot = gr.Chatbot(
                value=initial_history,  # initialize with the saved history
                height=600,
                label="Chat 💬",
                show_label=True
            )


            msg = gr.Textbox(
                label="Message input",
                show_label=False,
                placeholder="Ask me anything... 😊",
                container=False
            )
            with gr.Row():
                clear = gr.ClearButton([msg, chatbot], value="Clear conversation")
                send = gr.Button("Send 📤")

        with gr.Column(scale=1):
            gr.Markdown("### GiniGEN 🤖 [File Upload] 📁\nSupported formats: text, code, CSV, and Parquet files")
            file_upload = gr.File(
                label="Select file",
                file_types=["text", ".csv", ".parquet"],
                type="filepath"
            )

            with gr.Accordion("Advanced settings ⚙️", open=False):
                system_message = gr.Textbox(label="System message 📝", value="")
                max_tokens = gr.Slider(minimum=1, maximum=8000, value=4000, label="Max tokens 📊")
                temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Creativity level 🌡️")
                top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Response diversity 📈")

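    # temperature rescales token probabilities (lower = more deterministic),
    # while top_p limits sampling to the smallest set of tokens whose cumulative
    # probability exceeds p (nucleus sampling).
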
    # Example questions
    gr.Examples(
        examples=[
            ["Hello! What kind of help do you need? 🤝"],
            ["Could you explain this in a way that's easy to understand? 📚"],
            ["How could I apply this in practice? 🎯"],
            ["Do you have any further advice? ✨"],
            ["I have a few more questions; may I ask? 🤔"],
        ],
        inputs=msg,
    )

    # Make the clear button also reset the persisted history
    def clear_chat():
        chat_history.clear_history()
        return None, None

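    # The ClearButton already empties the textbox and chatbot widgets; this extra
    # handler additionally wipes /tmp/chat_history.json so the cleared conversation
    # does not reappear on the next page load.
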
    # Event bindings
    msg.submit(
        chat,
        inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
        outputs=[msg, chatbot]
    )

    send.click(
        chat,
        inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
        outputs=[msg, chatbot]
    )

    clear.click(
        clear_chat,
        outputs=[msg, chatbot]
    )

    # Automatic analysis on file upload
    file_upload.change(
        lambda: "Starting file analysis...",
        outputs=msg
    ).then(
        chat,
        inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
        outputs=[msg, chatbot]
    )

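    # .then() runs the second handler only after the first one finishes: the
    # textbox is filled with the sentinel "Starting file analysis..." string,
    # which chat() matches to substitute the full analysis prompt.
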
if __name__ == "__main__":
    demo.launch()
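
# Local run sketch (assumes the imported packages are installed: gradio,
# huggingface_hub, pandas, python-dotenv, plus pyarrow for Parquet support and
# tabulate, which pandas' to_markdown() relies on):
#   HF_TOKEN=hf_xxx python app.py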