Kaung Myat Htet committed on
Commit
7ba9a7d
1 Parent(s): e72ad1c

add app.py

Files changed (2)
  1. app.py +65 -0
  2. requirements.txt +6 -0
app.py ADDED
@@ -0,0 +1,65 @@
+ import os
+ import gradio as gr
+ from langchain_community.vectorstores import FAISS
+ from langchain_core.output_parsers import StrOutputParser
+ from langchain_core.prompts import ChatPromptTemplate
+ from langchain_core.runnables import RunnablePassthrough
+ from langchain_openai import ChatOpenAI, OpenAIEmbeddings
+
+
+ def format_docs(docs):
+     print(docs)
+     return "\n\n".join(doc.page_content for doc in docs)
+
+
+ prompt = ChatPromptTemplate.from_messages([
+     ('system',
+      "You are a KCE chatbot, and you are assisting customers with their inquiries about the company. "
+      "Answer the questions with the provided context. Do not include 'based on the context' or 'based on the documents' in your answer. "
+      "Remember that your job is to represent KCE company. "
+      "Please say you do not know if you do not know or cannot find the information needed."
+      "\nQuestion: {question}\nContext: {context}"),
+     ('user', "{question}")
+ ])
+
+ # Load the prebuilt FAISS index, expose it as a retriever, and set up the chat model.
+ embeddings = OpenAIEmbeddings()
+ db = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
+ retriever = db.as_retriever()
+ llm = ChatOpenAI(model="gpt-3.5-turbo-0125")
+
+ # RAG chain: retrieve relevant chunks, format them, prompt the LLM, return plain text.
+ rag_chain = (
+     {"context": retriever | format_docs, "question": RunnablePassthrough()}
+     | prompt
+     | llm
+     | StrOutputParser()
+ )
+
+
+ def chat_gen(message, history):
+     # OpenAI-style history (not used by the chain, which only sees the latest message).
+     history_openai_format = []
+     for human, assistant in history:
+         history_openai_format.append({"role": "user", "content": human})
+         history_openai_format.append({"role": "assistant", "content": assistant})
+     history_openai_format.append({"role": "user", "content": message})
+
+     # Stream the answer so the Gradio UI can render it incrementally.
+     partial_message = ""
+     for chunk in rag_chain.stream(message):
+         partial_message = partial_message + chunk
+         yield partial_message
+
+
+ initial_msg = "Hello! I am KCE assistant. You can ask me anything about KCE. I am happy to assist you."
+ chatbot = gr.Chatbot(value=[[None, initial_msg]], bubble_full_width=False)
+ demo = gr.ChatInterface(chat_gen, chatbot=chatbot).queue()
+
+
+ try:
+     demo.launch(debug=True, share=True, show_api=False)
+     demo.close()
+ except Exception as e:
+     demo.close()
+     print(e)
+     raise e
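
app.py loads a prebuilt FAISS index from the faiss_index directory, but this commit does not include the code that builds it. Below is a minimal sketch of how such an index could be created, assuming the company documents sit in a hypothetical docs/ folder of .txt files; the loader, splitter settings, and paths are illustrative, not part of this repo.

# build_index.py -- hypothetical helper, not part of this commit.
# Creates the "faiss_index" directory that app.py loads at startup.
from langchain_community.document_loaders import DirectoryLoader, TextLoader
from langchain_community.vectorstores import FAISS
from langchain_openai import OpenAIEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter

# Assumed location and format of the source documents.
documents = DirectoryLoader("docs", glob="**/*.txt", loader_cls=TextLoader).load()

# Split into overlapping chunks so retrieval returns focused passages.
splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
chunks = splitter.split_documents(documents)

# Embed the chunks and persist the index next to app.py.
db = FAISS.from_documents(chunks, OpenAIEmbeddings())
db.save_local("faiss_index")

Running a script like this once (with an OpenAI key available) produces the faiss_index folder that FAISS.load_local expects.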
requirements.txt ADDED
@@ -0,0 +1,6 @@
+ langchain
+ langchain_community
+ langchain-openai
+ tiktoken
+ faiss-cpu
+ gradio
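
To try the Space locally, installing these dependencies with pip install -r requirements.txt and exporting a valid OPENAI_API_KEY (both OpenAIEmbeddings and ChatOpenAI read it from the environment) before running python app.py should be enough, provided the faiss_index directory is present in the working directory.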