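"""Streamlit chatbot that streams replies from the Together AI chat completions
API, with a selectable model for each support topic."""
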
import streamlit as st
import os
import requests
import json

entire_assistant_response = ""


def get_streamed_response(message, history, model):
    all_message = []
    for human, assistant in history:
        all_message.append({"role": "user", "content": human})
        all_message.append({"role": "assistant", "content": assistant})

    global entire_assistant_response
    entire_assistant_response = ""  # Reset the entire assistant response

    all_message.append({"role": "user", "content": message})
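
    # Together's chat completions endpoint accepts an OpenAI-style list of
    # role/content messages; "stream_tokens": True asks for a streamed reply.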
    url = "https://api.together.xyz/v1/chat/completions"
    payload = {
        "model": model,
        "temperature": 1.05,
        "top_p": 0.9,
        "top_k": 50,
        "repetition_penalty": 1,
        "n": 1,
        "messages": all_message,
        "stream_tokens": True,
    }

    TOGETHER_API_KEY = os.getenv('TOGETHER_API_KEY')
    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "Authorization": f"Bearer {TOGETHER_API_KEY}",
    }
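
    # stream=True keeps requests from buffering the whole body, so chunks can
    # be read below as soon as the API sends them.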
    response = requests.post(url, json=payload, headers=headers, stream=True)
    response.raise_for_status()  # Ensure HTTP request was successful
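
    # The response body is a Server-Sent Events stream: each line looks like
    # "data: {json chunk}" and the stream ends with "data: [DONE]".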
    for line in response.iter_lines():
        if line:
            decoded_line = line.decode('utf-8')

            # Check for the completion signal
            if decoded_line == "data: [DONE]":
                yield entire_assistant_response  # Yield the entire response at the end
                break
            try:
                # Decode and strip any SSE format specific prefix ("data: ")
                if decoded_line.startswith("data: "):
                    decoded_line = decoded_line.replace("data: ", "")
                chunk_data = json.loads(decoded_line)
                content = chunk_data['choices'][0]['delta']['content']
                entire_assistant_response += content  # Aggregate content
                yield entire_assistant_response
            except json.JSONDecodeError:
                print(f"Invalid JSON received: {decoded_line}")
                continue
            except KeyError as e:
                print(f"KeyError encountered: {e}")
                continue

    print(entire_assistant_response)
    all_message.append({"role": "assistant", "content": entire_assistant_response})
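
# Example of consuming the generator outside Streamlit (a sketch: it assumes
# TOGETHER_API_KEY is set and "some-together-model" is a valid model id):
#
#     for partial in get_streamed_response("Hello", [], "some-together-model"):
#         print(partial)
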
# Initialize Streamlit app
st.title("AI Chatbot")
# Initialize session state if not present
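# st.session_state persists across Streamlit reruns, so the conversation
# survives each interaction.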
if "messages" not in st.session_state:
st.session_state.messages = []
# Define available models
models = {
    "Addiction Recovery": "model_addiction_recovery",
    "Mental Health": "model_mental_health",
    "Wellness": "model_wellness",
}
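# NOTE: the values above look like placeholder strings rather than real Together
# model identifiers; swap in actual model names before running against the API.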
# Allow user to select a model
selected_model = st.selectbox("Select Model", list(models.keys()))
# Define models
model_addiction_recovery = "model_addiction_recovery"
model_mental_health = "model_mental_health"
model_wellness = "model_wellness"
# Accept user input
if prompt := st.text_input("You:", key="user_input"):
    # Record the user's message
    with st.spinner("AI is typing..."):
        st.session_state.messages.append({"role": "user", "content": prompt})

        # Rebuild (user, assistant) pairs from the stored history for the API call
        past = st.session_state.messages[:-1]
        history = [(past[i]["content"], past[i + 1]["content"])
                   for i in range(0, len(past) - 1, 2)]

        # Call selected model to get response
        if selected_model == "Addiction Recovery":
            response_stream = get_streamed_response(prompt, history, model_addiction_recovery)
        elif selected_model == "Mental Health":
            response_stream = get_streamed_response(prompt, history, model_mental_health)
        elif selected_model == "Wellness":
            response_stream = get_streamed_response(prompt, history, model_wellness)

        # Each yielded value is the reply so far; keep only the final, complete one
        final_response = ""
        for response in response_stream:
            final_response = response
        st.session_state.messages.append({"role": "assistant", "content": final_response})
# Display chat history
for i, message in enumerate(st.session_state.messages):
    if message["role"] == "user":
        st.text_input("You:", value=message["content"], disabled=True, key=f"user_{i}")
    else:
        st.text_input("AI:", value=message["content"], disabled=True, key=f"ai_{i}")