HarshSanghavi committed
Commit b3f99e2 · verified · 1 Parent(s): be4b53f

Upload 6 files

app.py CHANGED
@@ -27,8 +27,8 @@ def response_generator(prompt: str) -> str:
         docs = retriever.invoke(prompt)
         my_context = [doc.page_content for doc in docs]
         my_context = '\n\n'.join(my_context)
-
         system_message = SystemMessage(content = SYSTEM_PROMPT.format(context=my_context, previous_message_summary=st.session_state.rag_memory.moving_summary_buffer))
+        print(system_message)
         chat_messages = (system_message + st.session_state.rag_memory.chat_memory.messages + HumanMessage(content=prompt)).messages
         print("total tokens: ", tiktoken_len(str(chat_messages)))
         # print("my_context*********",my_context)
@@ -36,7 +36,7 @@ def response_generator(prompt: str) -> str:
         return response.content

     except Exception as error:
-        print(error)
+        print(error, "ERROR")
         return "Oops! something went wrong, please try again."


@@ -72,7 +72,7 @@ if "retriever" not in st.session_state:
     st.session_state.retriever = my_vector_store.as_retriever(k=NUMBER_OF_VECTORS_FOR_RAG)


-st.title("LIC Mitra: Customer Support for LIC Policies")
+st.title("Insurance Bot")
 print("container")
 # Display chat messages from history
 container = st.container(height=600)
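Taken together, the app.py hunks only add two debug prints and retitle the page. For orientation, here is a minimal sketch of how the edited response_generator plausibly hangs together, assuming the pieces the hunks do not show (the chat model, the summary memory in st.session_state.rag_memory, and the retriever in st.session_state); it is a sketch under those assumptions, not the full file:

import streamlit as st
from langchain_core.messages import SystemMessage, HumanMessage
from langchain_openai import ChatOpenAI

from app_config import SYSTEM_PROMPT  # template shown in the app_config.py diff below

llm = ChatOpenAI(model="gpt-4o-mini")  # assumed model; the real one is not in the hunks

def response_generator(prompt: str) -> str:
    try:
        # Retrieve the top-k chunks for the question and join them into one context block.
        docs = st.session_state.retriever.invoke(prompt)
        my_context = "\n\n".join(doc.page_content for doc in docs)

        # Fill the template with the retrieved context and the running summary
        # kept by the conversation-summary memory; this commit adds the print.
        system_message = SystemMessage(content=SYSTEM_PROMPT.format(
            context=my_context,
            previous_message_summary=st.session_state.rag_memory.moving_summary_buffer,
        ))
        print(system_message)

        # Prepend the system message to the stored history plus the new user turn.
        chat_messages = (system_message
                         + st.session_state.rag_memory.chat_memory.messages
                         + HumanMessage(content=prompt)).messages

        response = llm.invoke(chat_messages)
        return response.content
    except Exception as error:
        print(error, "ERROR")
        return "Oops! something went wrong, please try again."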
app_config.py CHANGED
@@ -17,10 +17,14 @@ FILE_NAMEs = os.listdir('data')


 SYSTEM_PROMPT = """
-you are LIC Customer Service Chatbot.
-Use the following pieces of context to answer the user's question.
-If you don't know the answer, just say that you don't know, don't try to make up an answer.
+You are an insurance policy expert bot. You have different policies which can be found in company list.
+Here is the list of companies providng this policies
+Your tasks when user asks question:
+1. Familiarize themselves with the policy terms and conditions.
+2. Clear any doubts they may have about the policy.
+3. Compare different policies provided by different companies.

+Your response should be clear, concise and within the given context. If needed you can give detail response. If you can't find the answer in context just say 'I don't know'. Do not try to make up answers by yourself.
 context: {context}
 previous message summary: {previous_message_summary}
 """
data/National_Insurance_Company.pdf ADDED
Binary file (228 kB)

data/icici.pdf ADDED
Binary file (243 kB)

data/tata_aig.pdf ADDED
Binary file (223 kB)

data/uiic.pdf ADDED
Binary file (71.4 kB)
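The four added PDFs land in the same data/ directory that app_config.py scans via os.listdir('data'). The commit itself does not show how they are chunked and embedded; the sketch below is one common LangChain ingestion path, where PyPDFLoader, the recursive splitter, and a FAISS store are assumptions rather than this repo's confirmed choices:

import os
from langchain_community.document_loaders import PyPDFLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_community.vectorstores import FAISS
from langchain_openai import OpenAIEmbeddings

# Load every PDF under data/ (icici.pdf, tata_aig.pdf, uiic.pdf, ...).
docs = []
for name in os.listdir("data"):
    if name.endswith(".pdf"):
        docs.extend(PyPDFLoader(os.path.join("data", name)).load())

# Split into overlapping chunks and index them; chunk sizes here are arbitrary.
chunks = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_documents(docs)
my_vector_store = FAISS.from_documents(chunks, OpenAIEmbeddings())
retriever = my_vector_store.as_retriever(search_kwargs={"k": 4})  # k plays the role of NUMBER_OF_VECTORS_FOR_RAG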