# Source: Hugging Face Space file viewer export of robinroy03's app.py
# (commit c74400e — "updated app.py, initial rag demo ready. pip requirements
# also updated.", 5.88 kB). Viewer chrome ("raw / history / blame / No virus")
# converted to this comment so the file parses as Python.
import discord
import os
import json
import requests
import threading
# Gateway intents: message_content is privileged and must also be enabled in
# the Discord developer portal for on_message to see message text.
intents = discord.Intents.default()
intents.message_content = True
bot = discord.Bot(intents = intents)
# Bot token comes from the environment; None if unset (bot.run would then fail).
token = os.environ.get('TOKEN_DISCORD')
class Like_Dislike(discord.ui.View):
    """Feedback view with 👍/👎 buttons attached to each LLM reply.

    Clicks are only acknowledged to the user; the feedback is not
    persisted or forwarded anywhere yet.
    """

    @discord.ui.button(style=discord.ButtonStyle.primary, emoji="👍")
    async def like_button(self, button, interaction):
        # Respond to the interaction so Discord doesn't show "interaction failed".
        await interaction.response.send_message("You liked the response")

    @discord.ui.button(style=discord.ButtonStyle.primary, emoji="👎")
    async def dislike_button(self, button, interaction):
        await interaction.response.send_message("You disliked the response")
@bot.event
async def on_ready():
    """Log to stdout once the gateway connection is established."""
    print(f"{bot.user} is ready and online!")
@bot.slash_command(name="help", description="list of commands and other info.")
async def help(ctx: discord.ApplicationContext):
    """Slash command describing when the bot will answer a message."""
    # Backslash line-continuations keep this a single string literal; each
    # continued line starts with a literal "\n" to render the numbered list.
    await ctx.respond("Hello! FURY Bot responds to all your messages\
\n1)Inside Forum channel and\
\n2)Those that tag the bot.")
def llm_output(question: str, context: str) -> str:
    """
    Return the LLM's answer to *question*, grounded in the retrieved *context*.

    Sends a non-streaming generate request to the phi3 model endpoint.

    Raises:
        requests.HTTPError: if the endpoint returns a non-2xx status.
        requests.Timeout: if the endpoint does not answer within the timeout.
        KeyError: if the JSON body lacks a 'response' field.
    """
    URL_LLM = 'https://robinroy03-fury-bot.hf.space'
    # URL_LLM = 'http://localhost:11434'  # NOTE: FOR TESTING
    prompt = f"""
You are a senior developer. Answer the users question based on the context provided.
Question: {question}
Context: {context}
"""
    obj = {
        'model': 'phi3',
        'prompt': prompt,
        'stream': False
    }
    # Timeout so a stalled backend can't hang the caller forever; generation is
    # slow, so allow a generous budget.
    response = requests.post(URL_LLM + "/api/generate", json=obj, timeout=120)
    # Fail loudly on HTTP errors instead of surfacing a confusing KeyError later.
    response.raise_for_status()
    return response.json()['response']
def embedding_output(message: str) -> list:
    """
    Return the embedding vector for *message*.

    Returns:
        list: embedding values; length depends on the embedding model.

    Raises:
        requests.HTTPError: if the endpoint returns a non-2xx status.
        requests.Timeout: if the endpoint does not answer within the timeout.
        KeyError: if the JSON body lacks an 'output' field.
    """
    URL_EMBEDDING = 'https://robinroy03-fury-embeddings-endpoint.hf.space'
    # Bounded wait + explicit status check so backend failures raise clearly.
    response = requests.post(URL_EMBEDDING + "/embedding", json={"text": message}, timeout=30)
    response.raise_for_status()
    return response.json()['output']
def db_output(embedding: list) -> dict:
    """
    Return the KNN query results for *embedding* from the vector-DB endpoint.

    Returns:
        dict: parsed JSON response; expected to contain a 'matches' list
        (see the caller in on_message).

    Raises:
        requests.HTTPError: if the endpoint returns a non-2xx status.
        requests.Timeout: if the endpoint does not answer within the timeout.
    """
    URL_DB = 'https://robinroy03-fury-db-endpoint.hf.space'
    # Bounded wait + explicit status check so backend failures raise clearly.
    response = requests.post(URL_DB + "/query", json={"embeddings": embedding}, timeout=30)
    response.raise_for_status()
    return response.json()
@bot.event
async def on_message(message):
    """
    Reply to messages that mention the bot with an LLM answer built from
    the most relevant retrieved context (RAG pipeline: embed -> KNN -> LLM).
    """
    # Ignore our own messages and anything that doesn't mention the bot.
    if (message.author == bot.user) or not(bot.user.mentioned_in(message)):
        return

    print(message.content)
    await message.reply(content="Your message was received, it'll take around 30 seconds for FURY to process an answer.")

    # Strip the bot mention from the question. Derive the mention token from
    # bot.user.id (instead of a hard-coded ID) and handle both the plain
    # <@id> and the nickname <@!id> forms Discord may deliver.
    question = message.content.replace(f"<@{bot.user.id}>", "").replace(f"<@!{bot.user.id}>", "")

    # NOTE(review): embedding_output/db_output/llm_output are blocking HTTP
    # calls run on the event loop — consider asyncio.to_thread. All three are
    # inside the try so a backend failure produces the error reply instead of
    # an unhandled exception.
    try:
        embedding: list = embedding_output(question)
        db_knn: dict = db_output(embedding)
        # Highest-scoring KNN match only (test setup). TODO: make this better.
        context = db_knn['matches'][0]['metadata']['text']
        llm_answer: str = llm_output(question, context)
        await message.reply(content=llm_answer, view=Like_Dislike())
        await message.reply(content=context)
    except Exception as e:  # TODO: make exception handling better
        print(e)
        await message.reply("An error occurred. Retry again.")
# @bot.event
# async def on_message(message):
# url_llm = 'https://robinroy03-fury-bot.hf.space'
# url_embedding = 'https://robinroy03-fury-embeddings-endpoint.hf.space'
# url_db = 'https://robinroy03-fury-db-endpoint.hf.space'
# PROMPT = """
# You are a senior developer. Answer the users question based on the context provided.
# Question: {question}
# Context: {context}
# """
# user_question = message.content.replace("<@1243428204124045385>", "")
# user_question_embedding = requests.post(url_embedding + "/embedding", json={"text": user_question})
# user_question_embedding = json.loads(user_question_embedding.text)['output']
# print(user_question_embedding)
# user_question_retrived_db_context = requests.post(url_db + "/query", json={"embeddings": user_question_embedding})
# user_question_retrived_db_context = json.loads(user_question_retrived_db_context.text)['matches'][0]['metadata']['text']
# print(user_question_retrived_db_context)
# PROMPT = PROMPT.format(question=user_question, context=user_question_retrived_db_context)
# print(PROMPT)
# obj = {
# # 'user': message.author.id,
# 'model': 'phi3',
# 'prompt': PROMPT,
# 'stream': False
# }
# if (message.author != bot.user) and (bot.user.mentioned_in(message)):
# await message.reply(content="Your message was received, it'll take around 10 seconds for FURY to process an answer.")
# try:
# return_obj = requests.post(url_llm + "/api/generate", json=obj)
# return_json = json.loads(return_obj.text)
# await message.reply(content=return_json['response'] + "\n\n" + user_question_retrived_db_context, view=Like_Dislike())
# except requests.exceptions.RequestException as e:
# print(e)
# await message.reply(content="Sorry something internally went wrong. Retry again.")
bot.run(token)
# def run_bot():
# bot.run(token)
# threading.Thread(target=run_bot).start()
# ------------------------------------------------------------------------------------------------------------------------------
# import gradio as gr
# demo = gr.Blocks()
# with demo:
# gr.HTML("The bot is working..")
# demo.queue().launch()