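# chat_app.py
# Streamlit chat app for asking questions about a CSV dataset.
# An LLM agent (Together AI, Llama 3.1 405B) runs a Thought/Action/Observation
# loop and can either answer in text or generate and launch a separate
# Streamlit dashboard for the user to view.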
import streamlit as st
import streamlit.components.v1 as components
import openai
import subprocess
import re
import os
import pandas as pd
import socket
import time
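# Load the dataset up front and capture lightweight metadata (columns, dtypes,
# shape) that is later injected into the agent's system prompt.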
# csv_name = "./Financial_Sample.csv"
csv_name = "./global_superstore_2016.csv"
try:
df = pd.read_csv(csv_name)
metadata = {
"columns": df.columns.tolist(),
"dtypes": df.dtypes.apply(lambda x: x.name).to_dict(),
"shape": df.shape,
}
except Exception as e:
st.error(f"Error loading CSV file: {e}")
st.stop()
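# Helpers for finding a free localhost port for the generated dashboard process.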
def is_port_in_use(port: int) -> bool:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
return s.connect_ex(("localhost", port)) == 0
def find_free_port(starting_port: int) -> int:
port = starting_port
while is_port_in_use(port):
port += 1
return port
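# Tool exposed to the agent: write the generated code to code.py and launch it
# as a separate Streamlit app on the first free port at or above 8501.
# Returns (port, status_message); port is None on failure.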
def runcode(code: str) -> tuple:
with open("code.py", "w") as file:
file.write(code)
starting_port = 8501
free_port = find_free_port(starting_port)
try:
process = subprocess.Popen(
["streamlit", "run", "code.py", "--server.port", str(free_port)],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
        time.sleep(5)  # give the dashboard process a few seconds to start before checking it
if process.poll() is None:
return (
free_port,
f"Streamlit process started successfully on port {free_port}",
)
else:
stdout, stderr = process.communicate()
return (None, f"Streamlit process exited unexpectedly. Error: {stderr}")
except FileNotFoundError:
return (None, "Error: Streamlit is not installed or not in PATH")
except Exception as e:
return (None, f"An unexpected error occurred: {str(e)}")
class Agent:
def __init__(self, system_prompt="", known_actions=None):
self.system = system_prompt
self.messages = []
self.known_actions = known_actions if known_actions is not None else {}
self.client = openai.OpenAI(
api_key= os.environ.get('TOGETHER_API_KEY'),
base_url="https://api.together.xyz/v1",
)
self.messages.append({"role": "system", "content": self.system})
def __call__(self, message):
self.messages.append({"role": "user", "content": message})
result = self.execute()
self.messages.append({"role": "assistant", "content": result})
return result
def execute(self):
try:
            # Use the 405B model for better performance
response = self.client.chat.completions.create(
model="meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo",
stop=["PAUSE"],
messages=self.messages,
)
return response.choices[0].message.content
except Exception as e:
return f"Error executing model: {str(e)}"
def query(self, question, max_turns=5):
i = 0
next_prompt = question
while i < max_turns:
i += 1
result = self(next_prompt)
st.session_state.logs += f"Assistant: {result}\n"
print(f"Assistant: {result}\n")
            # Match "Action: <name>:" with the action input either on the same
            # line or on the following lines (generated code usually spans
            # several lines).
            action_re = re.search(r"Action: (\w+): *\n?(.*)", result, re.DOTALL)
if action_re:
action = action_re.group(1)
action_input = action_re.group(2).strip()
st.session_state.logs += (
f"Action: {action}\nAction Input: {action_input}\n"
)
print(f"Action: {action}\nAction Input: {action_input}\n")
if action not in self.known_actions:
error_msg = f"Unknown action: {action}: {action_input}"
raise Exception(error_msg)
st.session_state.logs += (
f" ------------------------\n running {action} {action_input}\n"
)
print(f" ------------------------\n running {action} {action_input}\n")
observation = self.known_actions[action](action_input)
st.session_state.logs += f"Observation: {observation}\n"
print(f"Observation: {observation}\n")
next_prompt = f"Observation: {observation}"
else:
print("No action found, ending query")
return
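# Actions the agent may call, plus the system prompt describing the dataset and
# the Thought/Action/PAUSE/Observation protocol.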
known_actions = {"runcode": runcode}
prompt = f"""
You are an expert at creating advanced interactive Streamlit dashboards in Python based on the user's query, and you run in a loop of Thought, Action, PAUSE, Observation.
At the end of the loop you output an Answer.
Use Thought to describe your thoughts about the question you have been asked.
Use Action to run one of the actions available to you - then return PAUSE.
Observation will be the result of running those actions.
Always return just code; never wrap it in ```.
Your task is to help the user answer queries about the dataset below. Decide, based on the user's query, whether to build a dashboard or give a plain textual answer.
Prefer Plotly for charts.
Here is the metadata of the dataset; the dataset file is {csv_name}:
Columns: {metadata['columns']}
Dtypes: {metadata['dtypes']}
Shape: {metadata['shape']}
You can use this metadata to generate results.
Your available actions are:
runcode
How to use actions:
Action: action_name: input_to_action
If input_to_action is code, do not use ```; just write the code.
Always follow the format Action: action_name: input_to_action
Example session:
Question: Give me a dashboard to visualize the people height and age
Thought: I need to create a dashboard where I can visualize the height and age data
Action: runcode: import streamlit as st
import pandas as pd
import plotly.express as px
from datetime import datetime
def load_data():
    df = pd.read_csv("{csv_name}")
return df
df = load_data()
st.title("Height vs Age Visualization")
fig = px.scatter(df, x='age', y='height', opacity=0.6)
fig.update_layout(
xaxis_title="Age (years)",
yaxis_title="Height (cm)",
title="Height vs Age Scatter Plot"
)
st.plotly_chart(fig, use_container_width=True)
PAUSE
Observation: interpret the output based on its stdout and take the necessary next steps.
Answer: the final answer for the user's request. If it is a dashboard, reply "Please visit <link> to view the dashboard" (include the dashboard URL); otherwise reply with your textual interpretation of the result.
""".strip()
if "bot" not in st.session_state:
st.session_state.bot = Agent(system_prompt=prompt, known_actions=known_actions)
st.set_page_config(layout="wide")
st.title("Customer Data Analysis")
if "logs" not in st.session_state:
st.session_state.logs = ""
if "messages" not in st.session_state:
st.session_state.messages = []
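# Replay the stored chat history on every rerun.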
for message in st.session_state.messages:
with st.chat_message(message["role"]):
st.markdown(message["content"])
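# Main chat handler: run the agent on the user's query, then extract the final
# "Answer:" section from the logs and render it (embedding the dashboard if the
# answer contains a URL).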
if query := st.chat_input("Enter your query about the dataset"):
st.session_state.messages.append({"role": "user", "content": query})
with st.chat_message("user"):
st.markdown(query)
st.session_state.logs = ""
with st.spinner("Generating response..."):
st.session_state.bot.query(query)
    # Pull out everything after the "Answer:" marker in the agent logs.
    answer_match = re.search(r"Answer:", st.session_state.logs)
    if answer_match:
        subsequent_text = st.session_state.logs[answer_match.end():].strip()
    else:
        subsequent_text = ""
        st.warning("No answer found in the generated response.")
    if subsequent_text:
        with st.chat_message("assistant"):
            url_pattern = r"(https?://[^\s]+)"
            url = re.findall(url_pattern, subsequent_text)
            if url:
                # Embed the generated dashboard alongside the assistant's text.
                components.iframe(src=url[0], width=800, height=600)
                st.write(subsequent_text)
            else:
                st.success(subsequent_text)
        st.session_state.messages.append(
            {"role": "assistant", "content": subsequent_text}
        )
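# Sidebar: raw agent logs plus dataset metadata and a data preview.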
with st.sidebar:
with st.expander("Logs"):
st.code(st.session_state.logs)
st.title("Dataset Metadata")
with st.expander("Metadata"):
st.write("Columns:", metadata["columns"])
st.write("Dtypes:", metadata["dtypes"])
st.write("Shape:", metadata["shape"])
st.write("Sample Data:")
st.write(df.head())
# TODO - Add Clear Button
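# A minimal sketch for the Clear button TODO above. Placing it in the sidebar
# and resetting messages/logs via an on_click callback is my assumption, not
# the author's design; the callback runs before the rerun, so the cleared
# history is what gets redrawn.
def clear_conversation():
    st.session_state.messages = []
    st.session_state.logs = ""

with st.sidebar:
    st.button("Clear chat", on_click=clear_conversation)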