openfree committed on
Commit 4fec5a3 • 1 Parent(s): bee778b

Update app.py

Files changed (1)
  1. app.py +33 -132
app.py CHANGED
@@ -96,169 +96,70 @@ def format_history(history):
         formatted_history.append({"role": "assistant", "content": assistant_msg})
     return formatted_history
 
+# Revised system prompt
 def chat(message, history, uploaded_file, system_message="", max_tokens=4000, temperature=0.7, top_p=0.9):
-    system_prefix = """You are a file analysis expert. Analyze the uploaded file in depth from the following perspectives:
-1. 📋 Overall structure and composition
-2. 📊 Key content and pattern analysis
-3. 📈 Data characteristics and meaning
-   - For datasets: Column meanings, data types, value distributions
-   - For text/code: Structural features, main patterns
-4. 💡 Potential applications
-5. ✨ Data quality and areas for improvement
+    system_prefix = """I am your friendly and intelligent AI assistant. I will communicate according to the following principles:
 
-Provide detailed and structured analysis from an expert perspective, but explain in an easy-to-understand way. Format the analysis results in Markdown and include specific examples where possible."""
+1. 🤝 Converse with a friendly and empathetic attitude
+2. 💡 Provide clear, easy-to-understand explanations
+3. 🎯 Grasp the intent of each question precisely and give tailored answers
+4. 📚 Refer to the uploaded file content when needed to provide concrete help
+5. ✨ Add value to the conversation through extra insights and suggestions
 
-    if uploaded_file:
-        content, file_type = read_uploaded_file(uploaded_file)
-        if file_type == "error":
-            return "", [{"role": "user", "content": message}, {"role": "assistant", "content": content}]
-
-        file_summary = analyze_file_content(content, file_type)
-
-        if file_type in ['parquet', 'csv']:
-            system_message += f"\n\nFile Content:\n```markdown\n{content}\n```"
-        else:
-            system_message += f"\n\nFile Content:\n```\n{content}\n```"
-
-        if message == "Starting file analysis...":
-            message = f"""[Structure Analysis] {file_summary}
-
-Please provide detailed analysis from these perspectives:
-1. 📋 Overall file structure and format
-2. 📊 Key content and component analysis
-3. 📈 Data/content characteristics and patterns
-4. ⭐ Quality and completeness evaluation
-5. 💡 Suggested improvements
-6. 🎯 Practical applications and recommendations"""
+I will always respond politely and kindly, and where needed I will add concrete examples or explanations
+to aid understanding."""
 
-    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
-
-    # Convert history to message format
-    if history is not None:
-        for item in history:
-            if isinstance(item, dict):
-                messages.append(item)
-            elif isinstance(item, (list, tuple)) and len(item) == 2:
-                messages.append({"role": "user", "content": item[0]})
-                if item[1]:
-                    messages.append({"role": "assistant", "content": item[1]})
-
-    messages.append({"role": "user", "content": message})
-
-    try:
-        client = get_client()
-        partial_message = ""
-        current_history = []
-
-        for msg in client.chat_completion(
-            messages,
-            max_tokens=max_tokens,
-            stream=True,
-            temperature=temperature,
-            top_p=top_p,
-        ):
-            token = msg.choices[0].delta.get('content', None)
-            if token:
-                partial_message += token
-                current_history = [
-                    {"role": "user", "content": message},
-                    {"role": "assistant", "content": partial_message}
-                ]
-                yield "", current_history
-
-    except Exception as e:
-        error_msg = f"❌ Inference error: {str(e)}"
-        error_history = [
-            {"role": "user", "content": message},
-            {"role": "assistant", "content": error_msg}
-        ]
-        yield "", error_history
-
-css = """
-footer {visibility: hidden}
-"""
-
-with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css, title="EveryChat 🤖") as demo:
+# Localized UI text
+with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="AI Assistant 🤖") as demo:
     gr.HTML(
         """
        <div style="text-align: center; max-width: 800px; margin: 0 auto;">
-            <h1 style="font-size: 3em; font-weight: 600; margin: 0.5em;">EveryChat 🤖</h1>
-            <h3 style="font-size: 1.2em; margin: 1em;">Your Intelligent File Analysis Assistant 📊</h3>
+            <h1 style="font-size: 3em; font-weight: 600; margin: 0.5em;">AI Assistant 🤖</h1>
+            <h3 style="font-size: 1.2em; margin: 1em;">Your Reliable Conversation Partner 💬</h3>
        </div>
        """
     )
-
+
     with gr.Row():
         with gr.Column(scale=2):
             chatbot = gr.Chatbot(
                 height=600,
-                label="Chat Interface 💬",
+                label="Chat Window 💬",
                 type="messages"
             )
             msg = gr.Textbox(
-                label="Type your message",
+                label="Enter your message",
                 show_label=False,
-                placeholder="Ask me anything about the uploaded file... 💭",
+                placeholder="Ask me anything... 💭",
                 container=False
             )
             with gr.Row():
-                clear = gr.ClearButton([msg, chatbot])
-                send = gr.Button("Send 📤")
+                clear = gr.ClearButton([msg, chatbot], value="Clear chat history")
+                send = gr.Button("Send 📤")
 
         with gr.Column(scale=1):
-            gr.Markdown("### Upload File 📁\nSupport: Text, Code, CSV, Parquet files")
+            gr.Markdown("### Upload File 📁\nSupported formats: text, code, CSV, Parquet files")
             file_upload = gr.File(
-                label="Upload File",
+                label="Select File",
                 file_types=["text", ".csv", ".parquet"],
                 type="filepath"
             )
 
-            with gr.Accordion("Advanced Settings ⚙️", open=False):
-                system_message = gr.Textbox(label="System Message 📝", value="")
-                max_tokens = gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens 📊")
-                temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature 🌡️")
-                top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P 📈")
-
-    # Event bindings
-    msg.submit(
-        chat,
-        inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
-        outputs=[msg, chatbot],
-        queue=True
-    ).then(
-        lambda: gr.update(interactive=True),
-        None,
-        [msg]
-    )
-
-    send.click(
-        chat,
-        inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
-        outputs=[msg, chatbot],
-        queue=True
-    ).then(
-        lambda: gr.update(interactive=True),
-        None,
-        [msg]
-    )
-
-    # Auto-analysis on file upload
-    file_upload.change(
-        chat,
-        inputs=[gr.Textbox(value="Starting file analysis..."), chatbot, file_upload, system_message, max_tokens, temperature, top_p],
-        outputs=[msg, chatbot],
-        queue=True
-    )
+            with gr.Accordion("Advanced Settings ⚙️", open=False):
+                system_message = gr.Textbox(label="System Message 📝", value="")
+                max_tokens = gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens 📊")
+                temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Creativity Level 🌡️")
+                top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Response Diversity 📈")
 
-    # Example queries
+    # Revised example prompts
     gr.Examples(
         examples=[
-            ["Please explain the overall structure and features of the file in detail 📋"],
-            ["Analyze the main patterns and characteristics of this file 📊"],
-            ["Evaluate the file's quality and potential improvements 💡"],
-            ["How can we practically utilize this file? 🎯"],
-            ["Summarize the main content and derive key insights ✨"],
-            ["Please continue with more detailed analysis 📈"],
+            ["Hello! What can I help you with? 🤝"],
+            ["Could you explain this in more detail? 💡"],
+            ["Could you explain it in a way that's easy to understand? 📚"],
+            ["How can I actually put this to use? 🎯"],
+            ["Is there anything else you would advise? ✨"],
+            ["I have a few more questions; may I ask? 🤔"],
        ],
        inputs=msg,
     )
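
Note: within this hunk, the new version keeps only the rewritten system_prefix; the old streaming body (get_client() plus client.chat_completion(..., stream=True)) and the explicit event bindings are removed and not re-added here. For reference, the streaming pattern the removed code relied on looks roughly like the sketch below. This is a minimal illustration, not the repository's code: the model id and HF_TOKEN handling are assumptions, and the real app builds its client in get_client(), which is outside this hunk.

```python
# Minimal sketch (not the app's code): streaming a chat completion the way the
# removed chat() body did, using huggingface_hub's InferenceClient.
import os

from huggingface_hub import InferenceClient


def stream_reply(messages, max_tokens=512, temperature=0.7, top_p=0.9):
    """Yield the assistant reply as it grows, one chunk at a time."""
    client = InferenceClient(
        model="HuggingFaceH4/zephyr-7b-beta",  # hypothetical model id, not from the diff
        token=os.getenv("HF_TOKEN"),           # hypothetical auth; the app uses get_client()
    )
    partial = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        # Recent huggingface_hub releases expose the streamed delta as an object.
        token = chunk.choices[0].delta.content
        if token:
            partial += token
            yield partial


if __name__ == "__main__":
    history = [
        {"role": "system", "content": "You are a friendly, helpful assistant."},
        {"role": "user", "content": "Say hello in one short sentence."},
    ]
    for partial in stream_reply(history):
        print(partial)
```

Depending on the installed huggingface_hub version, the streamed delta is either an object (attribute access, as above) or a dict, which is what the removed `.delta.get('content', None)` call assumed.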