# MedievalChatbot / app.py
# (PabloVD — first commit, 7661979)
import gradio as gr
from langchain_huggingface import HuggingFaceEndpoint
# Global variables
# NOTE(review): conversation_retrieval_chain is never assigned or read in this
# file — presumably a leftover from a retrieval-QA variant; confirm before removing.
conversation_retrieval_chain = None
# Configure the hosted model on the HuggingFace Inference API.
model_id = "microsoft/Phi-3.5-mini-instruct"
# temperature=0.1 keeps replies near-deterministic; max_new_tokens caps the
# generated reply length (max_length is forwarded to the endpoint as a kwarg).
llm_hub = HuggingFaceEndpoint(repo_id=model_id, temperature=0.1, max_new_tokens=600, model_kwargs={"max_length":600})
def handle_prompt(prompt, chat_history):
    """Generate the chatbot's reply for *prompt*.

    Called by gr.ChatInterface. With type="messages", *chat_history* is a
    list of {"role": ..., "content": ...} dicts that gradio itself maintains,
    so the handler only needs to return the assistant's reply text.

    Returns:
        str: the model's generated reply.
    """
    # Bug fix: HuggingFaceEndpoint is a plain text-completion LLM — invoke()
    # takes the prompt string and returns the generated text directly.  The
    # previous code passed a dict and indexed the result with ["result"],
    # which is the RetrievalQA-chain calling convention and raised TypeError
    # here on every message.  Mutating chat_history was also dropped: gradio
    # manages the messages-format history itself.
    answer = llm_hub.invoke(prompt)
    return answer
# Wire the handler into a themed chat UI and start the app.
greetingsmessage = "Hi, I'm a Chatbot!"
demo = gr.ChatInterface(
    handle_prompt,
    type="messages",
    title="ChatBot",
    theme='freddyaboulton/dracula_revamped',
    description=greetingsmessage,
)
demo.launch()