Kaung Myat Htet committed on
Commit c55b65b
1 Parent(s): 98e2967

add app.py

Files changed (2)
  1. app.py +92 -0
  2. requirements.txt +4 -0
app.py ADDED
@@ -0,0 +1,92 @@
+ import os
+
+ import gradio as gr
+ from langchain_community.vectorstores import FAISS
+ from langchain_core.output_parsers import StrOutputParser
+ from langchain_core.prompts import ChatPromptTemplate
+ from langchain_nvidia_ai_endpoints import ChatNVIDIA, NVIDIAEmbeddings
+
+
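+ # Load the FAISS index of travel documents, embedded with the NVIDIA embedding model.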
+ embedder = NVIDIAEmbeddings(model="nvolveqa_40k", model_type=None)
+ db = FAISS.load_local("phuket_faiss", embedder, allow_dangerous_deserialization=True)
+
+ # Not passed explicitly below; the NVIDIA endpoints read NVIDIA_API_KEY from the environment.
+ nvidia_api_key = os.environ.get("NVIDIA_API_KEY", "")
+
+
+ # Available model names: mixtral_8x7b, llama2_13b
+ llm = ChatNVIDIA(model="mixtral_8x7b")
+
+ initial_msg = (
+     "Hello! I am Roam Mate to help you with your travel!"
+     "\nHow can I help you?"
+ )
+
+ prompt_template = ChatPromptTemplate.from_messages([("system", """
+ ### [INST] Instruction: Answer the question based on your knowledge about places in Thailand. You are Roam Mate, a chat bot that helps users with their travel and recommends places according to their preferences. Here is context to help:
+ Document Retrieval:\n{context}\n
+ (Answer only from the retrieval. Only cite sources that are used. Make your response conversational.)
+
+ ### QUESTION:
+ {question} [/INST]
+ """), ('user', '{question}')])
+
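+ # Retrieval chain: look up similar documents as context, fill the prompt, and parse the reply to a string.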
+ chain = (
+     {
+         'context': db.as_retriever(search_type="similarity", search_kwargs={"k": 10}),
+         'question': (lambda x: x)
+     }
+     | prompt_template
+     | llm
+     | StrOutputParser()
+ )
+
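+ # Conversation chain used by chat_gen below; the retrieved context is passed in explicitly.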
+ conv_chain = (
+     prompt_template
+     | llm
+     | StrOutputParser()
+ )
+
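+ # Gradio handler: retrieve documents for the user message and stream the model's answer back.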
+ def chat_gen(message, history, return_buffer=True):
+     buffer = ""
+
+     doc_retriever = db.as_retriever(
+         search_type="similarity_score_threshold",
+         search_kwargs={"score_threshold": 0.2, "k": 10},
+     )
+     retrieved_docs = doc_retriever.invoke(message)
+     print(len(retrieved_docs))
+     print(retrieved_docs)
+
+     if len(retrieved_docs) > 0:
+         state = {
+             'question': message,
+             'context': retrieved_docs
+         }
+         # Stream the answer token by token so the chat window updates incrementally.
+         for token in conv_chain.stream(state):
+             buffer += token
+             yield buffer
+
+         # Append the sources of the retrieved documents and emit the final buffer.
+         buffer += "I used the following websites' data to generate the above answer:\n"
+         for doc in retrieved_docs:
+             buffer += f"{doc.metadata['source']}\n"
+         yield buffer
+     else:
+         passage = "I am sorry. I do not have relevant information on that specific topic. Please try another question."
+         buffer += passage
+         yield buffer if return_buffer else passage
+
+
+ # Seed the chat window with the greeting, then launch the app.
+ chatbot = gr.Chatbot(value=[[None, initial_msg]])
+ iface = gr.ChatInterface(chat_gen, chatbot=chatbot).queue()
+ iface.launch()
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ langchain
+ langchain-nvidia-ai-endpoints
+ gradio
+ faiss-cpu