import os

import gradio as gr
from langchain_openai import ChatOpenAI, OpenAIEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough
from langchain_core.prompts import ChatPromptTemplate
from langchain.retrievers import ContextualCompressionRetriever
from langchain.retrievers.document_compressors import FlashrankRerank


def format_docs(docs):
    """Join the retrieved documents into a single context string for the prompt."""
    return "\n\n".join(doc.page_content for doc in docs)
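

# Vector store and retriever: load the prebuilt FAISS index from disk and
# fetch the top-k most similar chunks for each question.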
embeddings = OpenAIEmbeddings(model="text-embedding-3-small")
db = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
retriever = db.as_retriever(search_kwargs={"k": 10})
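
# Rerank the retrieved documents with FlashRank and keep only the most relevant ones.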
compressor = FlashrankRerank()
compression_retriever = ContextualCompressionRetriever(
    base_compressor=compressor, base_retriever=retriever
)

llm = ChatOpenAI(model="gpt-4o")
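
# Prompt: system instructions plus the retrieved context, followed by the user's question.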
prompt = ChatPromptTemplate.from_messages([
    ("system",
     "You are the KCE chatbot, assisting customers with inquiries about the company. "
     "Answer questions using the provided context. Do not include phrases such as "
     "'based on the context' or 'based on the documents' in your answer. "
     "Remember that your job is to represent the KCE company. "
     "If you do not know the answer or cannot find the information needed, say so."
     "\nContext: {context}"),
    ("user", "{question}")
])
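
# RAG chain: retrieve and rerank the context, format it, fill the prompt,
# call the model, and parse the response into a plain string.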
rag_chain = (
    {"context": compression_retriever | format_docs, "question": RunnablePassthrough()}
    | prompt
    | llm
    | StrOutputParser()
)
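

# Streaming handler for Gradio's ChatInterface: yields the answer incrementally.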
def chat_gen(message, history):
    # Reformat the Gradio history into OpenAI-style messages.
    # Note: this is currently unused; only the latest message is sent to the chain.
    history_openai_format = []
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    # Stream the answer back to the UI as it is generated.
    partial_message = ""
    for chunk in rag_chain.stream(message):
        partial_message += chunk
        yield partial_message
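

# Gradio UI: seed the chat window with a greeting and wire up the streaming handler.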
initial_msg = "Hello! I am the KCE assistant. You can ask me anything about KCE. I am happy to assist you."
chatbot = gr.Chatbot(value=[[None, initial_msg]])
demo = gr.ChatInterface(chat_gen, chatbot=chatbot).queue()
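
# Launch the app; close the server cleanly if anything goes wrong.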
try:
    demo.launch(debug=True, share=False, show_api=False)
    demo.close()
except Exception as e:
    demo.close()
    print(e)
    raise e