"""Streamlit conversational Q&A chatbot backed by LangChain's ChatOpenAI.

Keeps the full message history in ``st.session_state['flowmessages']`` so the
model sees the whole conversation on every turn. Requires the OPENAI_API_KEY
environment variable to be set (ChatOpenAI reads it automatically).
"""

import os

import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.schema import AIMessage, HumanMessage, SystemMessage

# Streamlit UI — set_page_config must be the first Streamlit call.
st.set_page_config(page_title="Conversational Q&A ChatBot")
st.header("Hey, Let's Chat")

# Single chat-model client for the whole session.
# NOTE(review): the original also built an unused second ChatOpenAI with
# model_name="gpt-3.5-turbo-instruct" (a completion model, invalid for the
# chat API); it was dead code and has been removed.
chat = ChatOpenAI(
    openai_api_key=os.getenv("OPENAI_API_KEY"),
    temperature=0.5,
)

# Seed the conversation once per session with the system persona.
if "flowmessages" not in st.session_state:
    st.session_state["flowmessages"] = [
        SystemMessage(content="You are a comedian AI assistant")
    ]


def get_chatmodel_response(question):
    """Append *question* to the history, query the model, and return its reply.

    Parameters
    ----------
    question : str
        The user's latest message.

    Returns
    -------
    str
        The assistant's reply text. The history in session state is updated
        with both the question and the reply as a side effect.
    """
    st.session_state["flowmessages"].append(HumanMessage(content=question))
    answer = chat(st.session_state["flowmessages"])
    st.session_state["flowmessages"].append(AIMessage(content=answer.content))
    return answer.content


# Widget key "input" preserved from the original so session state is unchanged;
# the variable no longer shadows the builtin `input`.
user_input = st.text_input("Input: ", key="input")
submit = st.button("Ask the question")

if submit:
    st.subheader("The Response is")
    response = get_chatmodel_response(user_input)
    st.write(response)