# ChatBot / app.py
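# Gradio "IT Assistant" with three modes:
#   Chat-Message    - plain conversation with the hosted starchat2 model
#   Web-Search      - retrieval-augmented answers from a local FAISS index
#   Chart-Generator - chart rendering via QuickChart plus a model-written description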
import gradio as gr
import requests
from urllib.parse import quote
from IPython.display import Image, display
from langchain.embeddings import SentenceTransformerEmbeddings
from langchain.vectorstores import FAISS
from langchain_community.chat_models.huggingface import ChatHuggingFace
from langchain.schema import SystemMessage, HumanMessage, AIMessage
from langchain_community.llms import HuggingFaceEndpoint

# Embedding model used to embed queries against the local FAISS index
model_name = "sentence-transformers/all-mpnet-base-v2"
embedding_llm = SentenceTransformerEmbeddings(model_name=model_name)

# Load the pre-built FAISS vector store from disk
db = FAISS.load_local("faiss_index", embedding_llm, allow_dangerous_deserialization=True)
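# The "faiss_index" folder is assumed to have been built offline with the same
# embedding model; a minimal sketch (loader, splitter and paths are assumptions):
#   from langchain.document_loaders import TextLoader
#   from langchain.text_splitter import RecursiveCharacterTextSplitter
#   docs = TextLoader("it_knowledge_base.txt").load()
#   chunks = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50).split_documents(docs)
#   FAISS.from_documents(chunks, embedding_llm).save_local("faiss_index")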
# Set up Hugging Face model
llm = HuggingFaceEndpoint(
    repo_id="HuggingFaceH4/starchat2-15b-v0.1",
    task="text-generation",
    max_new_tokens=4096,
    temperature=0.6,
    top_p=0.9,
    top_k=40,
    repetition_penalty=1.2,
    do_sample=True,
)
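# Note: HuggingFaceEndpoint authenticates via the HUGGINGFACEHUB_API_TOKEN
# environment variable (or an explicit huggingfacehub_api_token argument);
# on Hugging Face Spaces this is typically provided as a repository secret.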
chat_model = ChatHuggingFace(llm=llm)
# Rolling conversation history shared by all modes
messages = [
    SystemMessage(content="You are a helpful assistant."),
    HumanMessage(content="Hi AI, how are you today?"),
    AIMessage(content="I'm great thank you. How can I help you?")
]

def handle_message(message: str, mode: str):
    # Check if the message is empty
    if not message.strip():
        return "Enter a valid message."
    # Dispatch to the handler for the selected mode
    if mode == "Chat-Message":
        return chat_message(message)
    elif mode == "Web-Search":
        return web_search(message)
    elif mode == "Chart-Generator":
        return chart_generator(message)
    else:
        return "Select a valid mode."

def chat_message(message: str):
    global messages
    prompt = HumanMessage(content=message)
    messages.append(prompt)
    response = chat_model.invoke(messages)
    # Store the AIMessage itself (not just its text) so the history stays a valid message list
    messages.append(response)
    # Keep the system prompt plus the most recent turns to bound the prompt size
    if len(messages) >= 6:
        messages = [messages[0]] + messages[-5:]
    return f"IT-Assistant: {response.content}"

def web_search(message: str):
    global messages
    # Retrieve the three most similar chunks from the FAISS index
    similar_docs = db.similarity_search(message, k=3)
    if similar_docs:
        source_knowledge = "\n".join([x.page_content for x in similar_docs])
    else:
        source_knowledge = ""
    augmented_prompt = f"""
    If the answer to the next query is not contained in the Web Search results, say 'No Answer Is Available' and then give general guidance for the query.
    Query: {message}
    Web Search:
    {source_knowledge}
    """
    prompt = HumanMessage(content=augmented_prompt)
    messages.append(prompt)
    response = chat_model.invoke(messages)
    # Store the AIMessage itself so the history stays a valid message list
    messages.append(response)
    if len(messages) >= 6:
        messages = [messages[0]] + messages[-5:]
    return f"IT-Assistant: {response.content}"

def chart_generator(message: str):
    global messages
    # Build the QuickChart natural-language chart URL (URL-encode the description)
    chart_url = f"https://quickchart.io/natural/{quote(message)}"
    # Request the chart to check that it can be generated
    response = requests.get(chart_url)
    if response.status_code == 200:
        # Ask the model to describe and analyse the generated chart
        message_with_description = f"Describe and analyse the content of this chart: {message}"
        prompt = HumanMessage(content=message_with_description)
        messages.append(prompt)
        response = chat_model.invoke(messages)
        messages.append(response)
        if len(messages) >= 6:
            messages = [messages[0]] + messages[-5:]
        # display() only renders inside a notebook; the chart URL is also returned in the text
        display(Image(url=chart_url, width=500, height=300))
        return f"IT-Assistant: {response.content}\nChart: {chart_url}"
    else:
        return "Can't generate this image. Please provide valid chart details."

demo = gr.Interface(
    fn=handle_message,
    inputs=[
        "text",
        gr.Radio(
            ["Chat-Message", "Web-Search", "Chart-Generator"],
            label="Mode",
            info="Choose a mode and enter your message, then click submit to interact.",
        ),
    ],
    outputs="text",
    title="IT Assistant",
)

demo.launch()
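# Run locally with `python app.py`; Gradio serves on http://127.0.0.1:7860 by default.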