import random
import gradio as gr
import requests
API_URL = "https://api-inference.huggingface.co/models/facebook/blenderbot-3B"
headers = {"Authorization": "Bearer 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"}
def query(payload):
    # POST the conversational payload to the Inference API and return the JSON reply.
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()
def chat(message):
    # Seed the conversation so the model already "knows" its own name.
    past_user = ["what is your name?"]
    generated = ["I am Sade, Funbi's AI chatbot"]
    message = message.lower()
    # Answer identity questions locally instead of calling the model.
    if "your name" in message or "who are you" in message:
        response = random.choice([
            "I am Sade, an AI chatbot made by Funbi. How are you?",
            "Sade, an AI chatbot made by Funbi. Feel free to ask me anything",
        ])
    else:
        response = query({
            "inputs": {
                "past_user_inputs": past_user,
                "generated_responses": generated,
                "text": message,
            },
        })
        response = response["generated_text"]
    # Note: these lists are local, so the conversation history resets on every call.
    past_user.append(message)
    generated.append(response)
    return response
demo = gr.Interface(
    chat,
    inputs="text",
    outputs="text",
    title="Chatbot",
    description="This is a chatbot built on BlenderBot, a pre-trained model by Facebook, which I then primed with a little extra information",
)
demo.launch()
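
# A minimal sketch of calling query() directly with the same conversational
# payload used in chat() above. The "where are you from?" text is just an
# illustrative input, and any response fields beyond "generated_text" (the one
# field the code relies on) are an assumption about the Inference API.
#
# result = query({
#     "inputs": {
#         "past_user_inputs": ["what is your name?"],
#         "generated_responses": ["I am Sade, Funbi's AI chatbot"],
#         "text": "where are you from?",
#     },
# })
# print(result["generated_text"])  # the model's next reply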