# app.py
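# Minimal Gradio chat demo that streams replies from a hosted
# Llama 3 model through the Hugging Face Inference API.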
# import pipelines
# import datasets
import os

import gradio as gr
from gradio import ChatMessage
from huggingface_hub import InferenceClient

# Fail fast if the Inference API token is missing.
if not os.getenv('HF_TOKEN'):
    raise ValueError('HF_TOKEN must be set')
MODEL = "meta-llama/Meta-Llama-3-8B-Instruct"
# PROMPT = "What is happiness?"
HF_TOKEN = os.getenv('HF_TOKEN')
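# Chat-completion client for the hosted model.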
client = InferenceClient(MODEL, token=HF_TOKEN)
# inputs = [{"role": "user", "content": PROMPT}]
# output = client.chat_completion(inputs, max_tokens=100)
# print(output.choices[0].message.content)
# print(output.model)
def interact_with_agent(prompt, messages):
    # Show the user message right away, then stream the assistant reply.
    messages.append(ChatMessage(role="user", content=prompt))
    yield messages
    # The API expects plain role/content dicts, not gradio ChatMessage objects.
    history = [m if isinstance(m, dict) else {"role": m.role, "content": m.content}
               for m in messages]
    response = ChatMessage(role="assistant", content="")
    messages.append(response)
    # for msg in stream_from_transformers_agent(agent, prompt):
    for chunk in client.chat_completion(history, max_tokens=100, stream=True):
        response.content += chunk.choices[0].delta.content or ""
        yield messages
    yield messages
with gr.Blocks() as demo:
    # ChatMessage entries require the Chatbot's openai-style "messages" format.
    chatbot = gr.Chatbot(type="messages")
    text_input = gr.Textbox(lines=1, label="Chat Message")
    text_input.submit(interact_with_agent, [text_input, chatbot], [chatbot])

if __name__ == "__main__":
    demo.launch()