0504ankitsharma committed on
Commit
807315c
·
verified ·
1 Parent(s): 3587fe0

Update app/main.py

Browse files
Files changed (1) hide show
  1. app/main.py +39 -13
app/main.py CHANGED
@@ -9,11 +9,15 @@ from langchain.chains import create_retrieval_chain
9
  from langchain_community.vectorstores import FAISS
10
  from langchain_community.document_loaders import UnstructuredWordDocumentLoader as DocxLoader
11
  from fastapi.middleware.cors import CORSMiddleware
12
- from fastapi import FastAPI
13
  from pydantic import BaseModel
14
  from langchain_community.embeddings import HuggingFaceBgeEmbeddings
15
  import nltk # Importing NLTK
16
  import time
 
 
 
 
17
 
18
  # Set writable paths for cache and data
19
  cache_dir = '/tmp'
@@ -47,7 +51,7 @@ def clean_response(response):
47
  cleaned = response.strip()
48
 
49
  # Remove any enclosing quotation marks
50
- cleaned = re.sub(r'^["\']+|["\']+$', '', cleaned)
51
 
52
  # Replace multiple newlines with a single newline
53
  cleaned = re.sub(r'\n+', '\n', cleaned)
@@ -67,10 +71,13 @@ app.add_middleware(
67
  allow_headers=["*"],
68
  )
69
 
 
 
 
70
  openai_api_key = os.environ.get('OPENAI_API_KEY')
71
  llm = ChatOpenAI(
72
  api_key=openai_api_key,
73
- model_name="gpt-4-turbo-preview", # or "gpt-3.5-turbo" for a more economical option
74
  temperature=0.7
75
  )
76
 
@@ -81,13 +88,27 @@ def read_root():
81
  class Query(BaseModel):
82
  query_text: str
83
 
 
 
 
 
 
 
 
 
 
84
  prompt = ChatPromptTemplate.from_template(
85
  """
86
  You are a helpful assistant designed specifically for the Thapar Institute of Engineering and Technology (TIET), a renowned technical college. Your task is to answer all queries related to TIET. Every response you provide should be relevant to the context of TIET. If a question falls outside of this context, please decline by stating, 'Sorry, I cannot help with that.' If you do not know the answer to a question, do not attempt to fabricate a response; instead, politely decline.
87
  You may elaborate on your answers slightly to provide more information, but avoid sounding boastful or exaggerating. Stay focused on the context provided.
 
88
  If the query is not related to TIET or falls outside the context of education, respond with:
89
  "Sorry, I cannot help with that. I'm specifically designed to answer questions about the Thapar Institute of Engineering and Technology.
90
  For more information, please contact at our toll-free number: 18002024100 or E-mail us at admissions@thapar.edu
 
 
 
 
91
  <context>
92
  {context}
93
  </context>
@@ -132,33 +153,38 @@ def get_embeddings():
132
  model_norm = HuggingFaceBgeEmbeddings(model_name=model_name, encode_kwargs=encode_kwargs)
133
  return model_norm
134
 
135
- @app.post("/chat") # Changed from /anthropic to /chat
136
- def read_item(query: Query):
137
  try:
 
138
  embeddings = get_embeddings()
139
  vectors = FAISS.load_local("./vectors_db", embeddings, allow_dangerous_deserialization=True)
140
  except Exception as e:
141
  print(f"Error loading vector store: {str(e)}")
142
  return {"response": "Vector Store Not Found or Error Loading. Please run /setup first."}
143
-
144
  prompt1 = query.query_text
145
  if prompt1:
146
  start = time.process_time()
147
  document_chain = create_stuff_documents_chain(llm, prompt)
148
  retriever = vectors.as_retriever()
149
  retrieval_chain = create_retrieval_chain(retriever, document_chain)
150
- response = retrieval_chain.invoke({'input': prompt1})
 
 
 
151
  print("Response time:", time.process_time() - start)
152
-
153
  # Apply the cleaning function to the response
154
  cleaned_response = clean_response(response['answer'])
155
-
156
- # For debugging, print the cleaned response
 
 
157
  print("Cleaned response:", repr(cleaned_response))
158
-
159
- return cleaned_response
160
  else:
161
- return "No Query Found"
162
 
163
  @app.get("/setup")
164
  def setup():
 
from langchain_community.vectorstores import FAISS
from langchain_community.document_loaders import UnstructuredWordDocumentLoader as DocxLoader
from fastapi.middleware.cors import CORSMiddleware
from fastapi import FastAPI, Depends
from pydantic import BaseModel
from langchain_community.embeddings import HuggingFaceBgeEmbeddings
import nltk  # Importing NLTK
import time
from typing import Dict, Optional
# FIX: `fastapi.sessions` does not exist and this import crashes at startup.
# SessionMiddleware lives in Starlette (bundled with FastAPI) and requires
# the `itsdangerous` package to be installed.
from starlette.middleware.sessions import SessionMiddleware
from fastapi.requests import Request
from fastapi.responses import JSONResponse

# Set writable paths for cache and data
cache_dir = '/tmp'
 
51
  cleaned = response.strip()
52
 
53
  # Remove any enclosing quotation marks
54
+ cleaned = re.sub(r'^\"+|\"+$', '', cleaned)
55
 
56
  # Replace multiple newlines with a single newline
57
  cleaned = re.sub(r'\n+', '\n', cleaned)
 
71
  allow_headers=["*"],
72
  )
73
 
74
# Session middleware provides signed-cookie sessions for contextual memory.
# FIX: the signing secret was a hard-coded placeholder ("your-secret-key");
# read it from the environment instead, falling back to the old literal so
# existing deployments keep working.
# NOTE(review): SessionMiddleware comes from starlette.middleware.sessions
# (the `fastapi.sessions` import path is invalid) and needs `itsdangerous`.
app.add_middleware(
    SessionMiddleware,
    secret_key=os.environ.get("SESSION_SECRET_KEY", "your-secret-key"),
)

# LLM client; the API key is taken from the environment and may be None,
# in which case ChatOpenAI will fail at call time — presumably deployment
# always sets OPENAI_API_KEY (TODO confirm).
openai_api_key = os.environ.get('OPENAI_API_KEY')
llm = ChatOpenAI(
    api_key=openai_api_key,
    model_name="gpt-4-turbo-preview",
    temperature=0.7,
)
83
 
 
88
  class Query(BaseModel):
89
  query_text: str
90
 
91
# In-memory storage for contextual memory, keyed by client IP.
# NOTE(review): keying on IP means every user behind the same NAT/proxy
# shares one context, and the dict grows without bound for the process
# lifetime — consider using the session cookie or a TTL cache instead.
user_sessions: Dict[str, Dict[str, str]] = {}

def get_user_context(request: "Request"):
    """Return ``(user_id, context_dict)`` for the calling client.

    The id is the client's IP address; an empty context dict is created on
    first sight and reused on subsequent calls.

    FIX: ``request.client`` can be ``None`` (e.g. under some test clients
    or unusual ASGI servers), which previously raised ``AttributeError``;
    such callers now share the "unknown" bucket.
    """
    user_id = request.client.host if request.client is not None else "unknown"
    return user_id, user_sessions.setdefault(user_id, {})
99
+
100
  prompt = ChatPromptTemplate.from_template(
101
  """
102
  You are a helpful assistant designed specifically for the Thapar Institute of Engineering and Technology (TIET), a renowned technical college. Your task is to answer all queries related to TIET. Every response you provide should be relevant to the context of TIET. If a question falls outside of this context, please decline by stating, 'Sorry, I cannot help with that.' If you do not know the answer to a question, do not attempt to fabricate a response; instead, politely decline.
103
  You may elaborate on your answers slightly to provide more information, but avoid sounding boastful or exaggerating. Stay focused on the context provided.
104
+
105
  If the query is not related to TIET or falls outside the context of education, respond with:
106
  "Sorry, I cannot help with that. I'm specifically designed to answer questions about the Thapar Institute of Engineering and Technology.
107
  For more information, please contact at our toll-free number: 18002024100 or E-mail us at admissions@thapar.edu
108
+
109
+ Previous Context:
110
+ {previous_context}
111
+
112
  <context>
113
  {context}
114
  </context>
 
153
  model_norm = HuggingFaceBgeEmbeddings(model_name=model_name, encode_kwargs=encode_kwargs)
154
  return model_norm
155
 
156
@app.post("/chat")
def read_item(query: Query, request: Request):
    """Answer a TIET query via the RAG retrieval chain, with simple
    per-client conversational memory.

    Returns ``{"response": <text>}`` in every branch so the frontend can
    rely on a single payload shape.
    """
    try:
        # Per-IP context bucket (see get_user_context for the caveats).
        user_id, user_context = get_user_context(request)
        embeddings = get_embeddings()
        vectors = FAISS.load_local("./vectors_db", embeddings, allow_dangerous_deserialization=True)
    except Exception as e:
        print(f"Error loading vector store: {str(e)}")
        return {"response": "Vector Store Not Found or Error Loading. Please run /setup first."}

    prompt1 = query.query_text
    if prompt1:
        # FIX: perf_counter() measures wall-clock latency; process_time()
        # only counts CPU time and excludes the time spent blocked on the
        # OpenAI network call, so the old "Response time" figure was near
        # zero regardless of the real wait.
        start = time.perf_counter()
        document_chain = create_stuff_documents_chain(llm, prompt)
        retriever = vectors.as_retriever()
        retrieval_chain = create_retrieval_chain(retriever, document_chain)

        # Feed the previous answer back in as lightweight conversational
        # memory; "None" on the first turn (the prompt template has a
        # {previous_context} slot for this).
        previous_context = user_context.get("context", "None")
        response = retrieval_chain.invoke({'input': prompt1, 'previous_context': previous_context})
        print("Response time:", time.perf_counter() - start)

        # Strip quotes/extra newlines from the model output.
        cleaned_response = clean_response(response['answer'])

        # Remember only the most recent answer for this client.
        user_context["context"] = cleaned_response

        print("Cleaned response:", repr(cleaned_response))
        return {"response": cleaned_response}
    else:
        return {"response": "No Query Found"}
188
 
189
  @app.get("/setup")
190
  def setup():