from fastapi import FastAPI, Request, HTTPException
from fastapi.responses import PlainTextResponse
from fastapi.middleware.cors import CORSMiddleware
from twilio.twiml.messaging_response import MessagingResponse
import os
import openai
import google.generativeai as genai
from llama_index.llms import OpenAI
from llama_index import VectorStoreIndex, SimpleDirectoryReader
from llama_index import StorageContext, load_index_from_storage
# Configure Gemini with the API key stored in the environment
secret = os.environ["key"]
genai.configure(api_key=secret)
model = genai.GenerativeModel('gemini-1.5-flash')

import user_guide_sync

# Lazily loaded llama_index objects (populated on the first WhatsApp request)
query_engine = index = None
#query_engine = (user_guide_sync.update_user_guide).as_query_engine()
app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
@app.post("/update_user_guide_data")
async def update_user_guide_data():
user_guide_sync.update_user_guide()
return "guide updated"
index = None
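# Example usage (assumption, not part of the original code: the app is served
# locally on port 8000):
#   curl -X POST http://localhost:8000/update_user_guide_data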
@app.post("/whatsapp")
async def reply_whatsapp(request: Request):
form_data = await request.form()
num_media = int(form_data.get("NumMedia", 0))
from_number = form_data.get("From")
message_body = form_data.get("Body")
user_query = message_body
response = MessagingResponse()
#msg.media(GOOD_BOY_URL)
    try:
        openai.api_key = os.environ["OPENAI_API_KEY"]

        # Define the messages for the translation step
        messages = [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": f"Always translate to English (only): '{user_query}'. Give only the translated part, without any extra details."}
        ]

        # Make the API call
        gptresponse = openai.chat.completions.create(
            model="gpt-4o",
            messages=messages
        )

        user_query = gptresponse.choices[0].message.content
        print(f"translated text : {user_query}")
        global index  # Use the module-level index variable
        if index is None:  # Load the persisted index only on the first request
            storage_context = StorageContext.from_defaults(persist_dir="llama_index")
            index = load_index_from_storage(storage_context=storage_context)
            print("Index loaded")
        else:
            print("Index already loaded")
        # Set up a retriever to fetch similar documents directly without full query processing
        retriever = index.as_retriever()

        # Retrieve the top similar documents based on the user query
        similar_docs = retriever.retrieve(user_query)  # Adjust `top_k` as needed

        # Prepare the context for the LLM by concatenating the content of the similar documents
        context = "\n\n".join([doc.node.text for doc in similar_docs])
prompt = f"""
context : {context}
user query : {user_query}
Instructions:
- First, understand the user question carefully.
- If you find the correct answer from the provided data, respond with detailed steps (1, 2, ...) and always include a more details link.
- If the correct answer is not found in the provided data or proide the correct solution to user using data then output is only this : "contact our help desk". dont add extra anything
"""
messages = [
{"role": "user", "content": prompt }
]
        # Make the API call
        gptresponse = openai.chat.completions.create(
            model="gpt-4o-mini",
            messages=messages
        )

        gpt_response = gptresponse.choices[0].message.content
        print(str(gpt_response).lower())

        # Fall back to the general support prompt when the guide has no answer
        if "contact our help desk" in str(gpt_response).lower() or "our help desk" in str(gpt_response).lower():
            print("help desk option")
prompt = f"""
system:
you are parallax technologies chatbot design for answer the user question like a real human.
contact details suppor team link : https://projects.storemate.lk/customer Email : support@storemate.lk Youtube : https://www.youtube.com/channel/UCFkX9Fa-Qe6Qi4V5f0RcfSA Facebook : https://www.facebook.com/storemateinventory web link : https://storemate.lk
only give single answer and don't give answer for general answers(this is CRM system for only pos system clients)
note : don't give any steps for solve the issues but give steps for sytem slow and performance related questions
user:{user_query}
"""
messages = [
{"role": "system", "content": "you are parallax technologies chatbot design for answer the user question like a real human"},
{"role": "user", "content": prompt}
]
            #gen_response = model.generate_content(prompt)
            gpt_response = openai.chat.completions.create(
                model="gpt-4o-mini",
                messages=messages,
                temperature=0,
            )

            response.message(str(gpt_response.choices[0].message.content))
            #response.message(gen_response.text)
            #response.message(gen_response.text +"\n\n"+default)
            return PlainTextResponse(str(response), media_type="application/xml")

        response.message(str(gpt_response))
        #response.message("https://storemate.lk")
        return PlainTextResponse(str(response), media_type="application/xml")
    except Exception as e:
        # Any failure (missing keys, API errors, empty index) falls back to a retry prompt
        print(str(e))
        response.message("please ask again...!")
        return PlainTextResponse(str(response), media_type="application/xml")

# Run the application (make sure you have the necessary setup to run FastAPI)
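# A minimal local runner sketch (assumptions, not in the original code: uvicorn is
# installed and port 7860 is free; the hosting platform may launch the app differently):
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)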