import os
from groq import Groq
import gradio as gr

# Read the Groq API key from the environment
api_key = os.getenv("GROQ_API_KEY2")

client = Groq(api_key=api_key)

# System prompt prepended to every conversation
system_prompt = {
    "role": "system",
    "content": "You are a useful assistant. You reply with efficient answers."
}

async def chat_groq(message, history):
    # Rebuild the full message list from the Gradio chat history
    messages = [system_prompt]
    for msg in history:
        messages.append({"role": "user", "content": str(msg[0])})
        messages.append({"role": "assistant", "content": str(msg[1])})
    messages.append({"role": "user", "content": str(message)})

    response_content = ''
    # Stream the completion from the `llama-3.1-70b-versatile` model
    stream = client.chat.completions.create(
        model="llama-3.1-70b-versatile",
        messages=messages,
        max_tokens=1024,
        temperature=1.3,
        stream=True
    )
    # Yield the accumulated reply so Gradio renders it incrementally
    for chunk in stream:
        content = chunk.choices[0].delta.content
        if content:
            response_content += content
            yield response_content
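
# For reference (illustrative, not part of the original script): with Gradio's
# default tuple-style history, a two-turn chat makes chat_groq build a
# `messages` list shaped like
#   [system_prompt,
#    {"role": "user", "content": "first question"},
#    {"role": "assistant", "content": "first answer"},
#    {"role": "user", "content": "current message"}]
# which is what gets sent to the Groq API above.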
# Gradio interface
with gr.Blocks(theme=gr.themes.Monochrome()) as demo:
    gr.ChatInterface(chat_groq,
                     clear_btn=None,
                     undo_btn=None,
                     retry_btn=None)

demo.queue()
demo.launch()
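
# A minimal sketch of running this Space locally (assumed setup, not part of
# the original file): install the two dependencies and export the key under
# the same variable name the script reads.
#
#   pip install groq gradio
#   export GROQ_API_KEY2="gsk_..."   # placeholder value
#   python app.py                    # assumes the script is saved as app.py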