# NOTE: scraped Hugging Face Space metadata (status: Sleeping; file size: 5,580
# bytes; revision fe2a0f2) — preserved here as a comment so the file stays valid Python.
import gradio as gr
import requests
import time
import logging
import json
from typing import Dict, Any
from datetime import datetime
import asyncio
import aiohttp
# Configure root logging: mirror every record to the console and to a
# timestamped log file created once per process start.
LOG_FORMAT = '%(asctime)s | %(levelname)s | %(message)s'
log_filename = f'chat_logs_{datetime.now().strftime("%Y%m%d_%H%M%S")}.log'
logging.basicConfig(
    level=logging.INFO,
    format=LOG_FORMAT,
    handlers=[
        logging.StreamHandler(),
        logging.FileHandler(log_filename),
    ],
)
class Chatbot:
    """Streams answers from a Flowise prediction endpoint and keeps chat history.

    History is a list of ``(user_message, bot_reply)`` tuples in the shape the
    Gradio ``Chatbot`` component expects.
    """

    def __init__(self):
        # Flowise prediction endpoint for one specific chatflow.
        self.api_url = "http://localhost:3000/api/v1/prediction/6e2ce64b-b45b-4794-97b6-a85480882d72"
        # SECURITY: bearer token hard-coded in source control — move it to an
        # environment variable or secrets manager and rotate the exposed key.
        self.headers = {
            "Authorization": "Bearer kpIJOgDQH9XWy3YmvcfTj18A_2PGJi6uXhLCCGogkFM",
            "Content-Type": "application/json",
            "Accept": "text/event-stream"
        }
        self.history = []        # list of (user_message, bot_reply) tuples
        self.request_count = 0   # reserved for request accounting (currently unused)

    async def stream_response(self, message: str):
        """Yield the progressively accumulated answer text for *message*.

        Reads the endpoint's Server-Sent-Events stream line by line, parses
        ``data: {...}`` frames, and appends each frame's ``token`` field to the
        running text, yielding the full text after every frame. Non-JSON
        frames (keep-alives, terminators) are skipped. On any transport or
        HTTP error a single human-readable error string is yielded instead.
        """
        async with aiohttp.ClientSession() as session:
            payload = {
                "question": message,
                "overrideConfig": {
                    "supervisorName": "AI Assistant",
                    "supervisorPrompt": "You are a helpful AI assistant",
                    "summarization": True,
                    "recursionLimit": 1
                }
            }
            try:
                async with session.post(
                    self.api_url,
                    headers=self.headers,
                    json=payload
                ) as response:
                    response.raise_for_status()
                    full_response = ""
                    async for line in response.content:
                        if not line:
                            continue
                        decoded_line = line.decode('utf-8').strip()
                        if not decoded_line.startswith('data: '):
                            continue
                        # Narrow try: only the JSON parse may legitimately
                        # fail per-frame; transport errors go to the outer handler.
                        try:
                            data = json.loads(decoded_line[6:])
                        except json.JSONDecodeError:
                            continue
                        full_response += data.get('token', '')
                        yield full_response
            except Exception as e:
                error_msg = f"Streaming Error: {str(e)}"
                logging.error(error_msg)
                yield error_msg

    async def chat(self, message: str, history: list):
        """Gradio event handler: stream the bot's reply into the history.

        Appends ``(message, None)`` to a copy of *history*, then yields the
        whole history after each streamed chunk so the UI re-renders
        progressively.
        """
        logging.info(f"""
================== CHAT MESSAGE ==================
User Message: {message}
History Length: {len(history)}
Timestamp: {datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')}
================================================
""")
        # Copy so Gradio's state list is not mutated in place.
        self.history = history + [(message, None)]
        async for partial_response in self.stream_response(message):
            self.history[-1] = (message, partial_response)
            yield self.history
        logging.info(f"""
================== BOT RESPONSE ==================
Bot Answer: {self.history[-1][1]}
Total Messages in History: {len(self.history)}
================================================
""")
def create_ui() -> gr.Interface:
    """Build the streaming chat UI and wire up its event handlers."""
    logging.info("Initializing chat interface...")

    bot = Chatbot()

    with gr.Blocks(theme=gr.themes.Soft()) as chat_interface:
        gr.Markdown("# AI Agent Assistant")
        gr.Markdown("Ask me anything! I'll stream the response as I think.")

        chatbot = gr.Chatbot()
        msg = gr.Textbox(
            placeholder="Type your message here...",
            container=False,
            scale=7,
        )

        with gr.Row():
            submit = gr.Button("Send", variant="primary", scale=1)
            clear = gr.Button("Clear", variant="secondary", scale=1)

        # Clickable starter prompts that populate the textbox.
        gr.Examples(
            examples=[
                "What is machine learning?",
                "How do neural networks work?",
                "Explain Python decorators",
            ],
            inputs=msg,
        )

        # Enter in the textbox and the Send button trigger the same handler.
        handler_kwargs = dict(
            fn=bot.chat,
            inputs=[msg, chatbot],
            outputs=chatbot,
            api_name="chat",
        )
        submit_event = msg.submit(**handler_kwargs)
        submit_click_event = submit.click(**handler_kwargs)

        # Clear wipes both the transcript and the input box.
        clear.click(lambda: None, None, chatbot, queue=False)
        clear.click(lambda: "", None, msg, queue=False)

    logging.info("Chat interface created successfully")
    return chat_interface
def main():
    """Entry point: build the chat UI and serve it on port 7860."""
    logging.info("""
=================================================================
Starting Agent Chatbot Application
Time: {time}
=================================================================
""".format(time=datetime.now().strftime('%Y-%m-%d %H:%M:%S')))

    interface = create_ui()

    # share=True creates a public tunnel URL (needed for Colab); debug=True
    # keeps the process in the foreground with verbose output.
    logging.info("Launching web interface...")
    interface.launch(share=True, debug=True, server_port=7860)
# Script entry point: launch only when run directly, not on import.
# (Removed a stray trailing "|" scrape artifact that made this line invalid Python.)
if __name__ == "__main__":
    main()