# Sarah Ciston
# remove kwargs in gr.block
# 70f57d7
# app.py
import os

# Fail fast before importing anything heavy: the InferenceClient below is
# useless without a Hugging Face API token in the environment.
if not os.getenv('HF_TOKEN'):
    raise ValueError('HF_TOKEN must be set')

from huggingface_hub import InferenceClient
import gradio as gr
from gradio import ChatMessage

# Chat model served through the HF Inference API.
MODEL = "meta-llama/Meta-Llama-3-8B-Instruct"
HF_TOKEN = os.getenv('HF_TOKEN')
client = InferenceClient(MODEL, token=HF_TOKEN)
def interact_with_agent(prompt, messages):
    """Append the user's prompt and stream the model's reply into the chat.

    Args:
        prompt: Text the user submitted from the textbox.
        messages: Running chat history (list of ``gr.ChatMessage``), which is
            also the Chatbot component's state.

    Yields:
        The updated ``messages`` list after each streamed token so the
        Chatbot re-renders incrementally.
    """
    messages.append(ChatMessage(role="user", content=prompt))
    yield messages

    # chat_completion expects plain {"role", "content"} dicts, not gradio
    # ChatMessage objects — convert the history before sending it.
    history = [
        m if isinstance(m, dict) else {"role": m.role, "content": m.content}
        for m in messages
    ]

    # Accumulate the reply into one assistant message. Without stream=True,
    # chat_completion returns a single ChatCompletionOutput — iterating it
    # and appending the pieces would corrupt the chatbot state.
    reply = ChatMessage(role="assistant", content="")
    messages.append(reply)
    for chunk in client.chat_completion(history, max_tokens=100, stream=True):
        delta = chunk.choices[0].delta.content
        if delta:
            reply.content += delta
            yield messages
    yield messages
with gr.Blocks() as demo:
    # type="messages" makes the Chatbot accept the gr.ChatMessage objects
    # that interact_with_agent appends (the default pair format would not).
    chatbot = gr.Chatbot(type="messages")
    text_input = gr.Textbox(lines=1, label="Chat Message")
    # Submitting the textbox streams updated history back into the chatbot.
    text_input.submit(interact_with_agent, [text_input, chatbot], [chatbot])

if __name__ == "__main__":
    demo.launch()