Redmind committed on
Commit
0befa89
·
verified ·
1 Parent(s): 407d93b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +161 -52
app.py CHANGED
@@ -1,64 +1,173 @@
 
 
 
 
 
 
1
  import gradio as gr
2
- from huggingface_hub import InferenceClient
 
3
 
4
- """
5
- For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
6
- """
7
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
8
 
 
 
 
9
 
10
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat completion for *message* given the prior conversation.

    Args:
        message: The new user message to answer.
        history: Prior (user, assistant) turn pairs; empty strings are skipped.
        system_message: Text placed in the leading system role message.
        max_tokens: Maximum number of new tokens to generate.
        temperature: Sampling temperature forwarded to the model.
        top_p: Nucleus-sampling cutoff forwarded to the model.

    Yields:
        str: The partial response accumulated so far, once per streamed token.
    """
    # Conversation starts with the system prompt, then replays history.
    messages = [{"role": "system", "content": system_message}]

    for val in history:
        # Skip empty sides of a turn so no blank messages are sent.
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})

    messages.append({"role": "user", "content": message})

    response = ""

    # Stream tokens from the module-level InferenceClient; each chunk carries
    # one delta of the assistant reply.
    for message in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = message.choices[0].delta.content

        # NOTE(review): delta.content can be None on the final streamed chunk
        # for some providers, which would make `response += token` raise —
        # confirm against the huggingface_hub streaming contract.
        response += token
        yield response
 
 
 
41
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
42
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
43
"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
# Chat UI wired to respond(); each additional input maps positionally to a
# respond() parameter after (message, history).
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)


# Only start the server when run as a script, not when imported.
if __name__ == "__main__":
    demo.launch()
 
1
import os

import gradio as gr
from dotenv import load_dotenv
from langchain_community.utilities import SQLDatabase

from chat import ChatHandler
from file_upload import FileHandler

# Load .env before any os.getenv call so locally defined values are visible.
load_dotenv()

# Connection string for the SQL database the chat handler can query.
# NOTE(review): an unset DB_CONNECTION_STRING yields "" here and will make
# SQLDatabase.from_uri fail at startup — confirm the env var is required.
db_uri = os.getenv("DB_CONNECTION_STRING", "")
db_final = SQLDatabase.from_uri(db_uri)

# Configuration pulled from the environment (empty string when unset).
VECTOR_DB_PATH = os.getenv("VECTOR_DB_PATH_DB", "")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
GROK_API_KEY = os.getenv("GROK_API_KEY", "")

# Initialize FileHandler and ChatHandler shared by the UI callbacks below.
file_handler = FileHandler(VECTOR_DB_PATH, OPENAI_API_KEY, GROK_API_KEY)
chat_handler = ChatHandler(VECTOR_DB_PATH, OPENAI_API_KEY, GROK_API_KEY, db_final)

# Chat history: module-level list of (user_html, ai_html) pairs shown in the UI.
chat_history = []
 
 
 
 
 
 
 
22
 
 
 
 
 
 
23
 
24
# File Upload and Processing Function
def handle_file_uploads(files):
    """Process an uploaded file and return a status message for the UI.

    Args:
        files: The uploaded file object from gr.UploadButton (its ``.name``
            attribute holds the temp-file path), or a falsy value when
            nothing was uploaded.

    Returns:
        str: A human-readable status message. The Gradio wiring sends this
        to a single Textbox (``outputs=[file_output]``), so every branch
        must return exactly one string — the original returned a
        ``(message, chat_history)`` tuple on the empty/error paths, which
        rendered as a stringified tuple in the UI.
    """
    try:
        if not files:
            # No file selected: prompt the user instead of failing.
            return "Please upload a file."
        file_name = files.name  # Retrieve the name of the uploaded file
        response = file_handler.handle_file_upload(file_name, files)
        return response["message"]
    except Exception as e:
        # Surface the failure text to the UI rather than crashing the app.
        return f"Error processing file: {e}"
34
 
 
35
 
36
# Chat Function
def handle_chat(user_message):
    """Answer one user message and append the exchange to the chat history.

    Messages containing the word "chart" are treated as visualization
    requests: the whole text is passed as ``visual_query`` and the part
    after the first "for" (when present) becomes the textual question.

    Args:
        user_message: Raw text from the input textbox.

    Returns:
        list[tuple[str, str]]: The updated chat history as (user, ai)
        pairs of HTML-formatted strings, suitable for gr.Chatbot.
    """
    global chat_history
    # Defined up front so the except-handler below can never hit a NameError
    # (the original referenced it even when the exception fired earlier).
    visual_query = None
    try:
        if not user_message.strip():
            # NOTE: returns a fresh one-off list rather than the running
            # history, so the chatbot shows only this prompt for this turn.
            return [("system", "Please enter a question.")]

        if "chart" in user_message.strip():
            # Whole message is the visualization query; the text after the
            # first "for" (if any) is the question sent to the model.
            parts = user_message.split("for", 1)
            visual_query = user_message
            if len(parts) > 1 and parts[1].strip():
                user_message = parts[1].strip()
            # else: keep the original message — the old code set it to None
            # here and then crashed on user_message.strip() below.

        # Get AI response (and optionally a chart image path).
        ai_response, visual_response = chat_handler.answer_question(
            user_message.strip(), visual_query
        )
        user_message = f"<b style='color:#6366f1;'>user:</b> {user_message.strip()}"

        if visual_response:
            import base64
            from io import BytesIO

            from PIL import Image

            # Open the chart image and inline it as a base64 <img> tag
            # placed ahead of the textual answer.
            img = Image.open(visual_response)
            buffered = BytesIO()
            img.save(buffered, format="PNG")
            img_str = base64.b64encode(buffered.getvalue()).decode("utf-8")
            img_tag = f'<img src="data:image/png;base64,{img_str}" style="width:450px; height:400px;">'
            ai_response = img_tag + ai_response
            # Show the original visualization request as the user turn.
            user_message = visual_query.strip()

        ai_response = f"<b style='color:#6366f1;'>AI:</b> {ai_response}"
        # Add the exchange to the shared history and return it for display.
        chat_history.append((user_message, ai_response))
        return chat_history
    except Exception:
        # Record the failure against whichever message text we have.
        shown = visual_query if visual_query is not None else user_message
        chat_history.append(
            (shown, "Sorry, I couldn't process your request. Please try again.")
        )
        return chat_history
86
# Chat handling function
def chat_interaction(user_message):
    """Send-button bridge: return the updated history and a cleared input."""
    # Empty string as the second output resets the textbox after sending.
    return handle_chat(user_message), ""
90
+
91
+
92
# Top-level Gradio UI: a Chat tab wired to chat_interaction and a File
# Upload tab wired to handle_file_uploads, styled with custom CSS.
with gr.Blocks(theme="soft") as app:  # Use the 'soft' theme
    # Page title banner.
    gr.Markdown(
        "<h1 style='color:blue;'>Agentic AI - Chat with Documents & Database</h1>",
        elem_id="title"
    )

    with gr.Tab("Chat"):
        chat_box = gr.Chatbot(label="Chat History")
        with gr.Row():  # Place elements in the same row
            with gr.Column(scale=2):
                user_input = gr.Textbox(
                    placeholder="Type your message here...",
                    label="Your Message",
                    elem_id="user-input", container=False)
            with gr.Column(scale=1):
                with gr.Row():  # Place the buttons in the same row
                    send_button = gr.Button("Send", elem_id="send-button")
                    clear_button = gr.Button("Clear", elem_id="clear-button")

        # Send: run the chat and clear the input (two outputs from
        # chat_interaction: history -> chat_box, "" -> user_input).
        send_button.click(
            chat_interaction,
            inputs=[user_input],
            outputs=[chat_box, user_input]
        )

    with gr.Tab("File Upload"):
        upload_button = gr.UploadButton(
            label="Upload your file (PDF, Excel, Docx, Txt, CSV)",
            file_types=[".pdf", ".xlsx", ".docx", ".txt", ".csv"],
            file_count="single"
        )
        file_output = gr.Textbox(label="File Processing Output")
        # Process the file as soon as the upload completes.
        upload_button.upload(fn=handle_file_uploads, inputs=upload_button, outputs=[file_output])

    # Clear input field function
    def clear_input():
        return ""  # Clear the input field

    # Clear button only resets the textbox; chat history is untouched.
    clear_button.click(
        clear_input,
        inputs=[],
        outputs=[user_input]  # Clear only the user_input field
    )

    # Custom CSS for styling
    app.css = """
    #send-button {
        color: white;
        border-radius: 20px; /* Round corners */
        background-color: #6366f1;
        transition: background-color 0.3s, transform 0.3s;
    }

    #send-button:hover {
        background-color: #6366f0; /* Change background color on hover */
        transform: scale(1.05); /* Slightly enlarge on hover */
    }
    #clear-button {
        color: white;
        border-radius: 20px; /* Round corners */
        background-color: #6366f1;
        transition: background-color 0.3s, transform 0.3s;
    }

    #clear-button:hover {
        background-color: #6366f0; /* Change background color on hover */
        transform: scale(1.05); /* Slightly enlarge on hover */
    }
    #user-input {
        flex-grow: 1; /* Allow textbox to take remaining space */
    }

    #title {
        margin-bottom: 10px; /* Space below the title */
        text-align: center;
    }
    """

# Launch the app (debug=True surfaces tracebacks in the console).
app.launch(debug=True)