|
import os |
|
from groq import Groq |
|
import gradio as gr |
|
from config import GROQ_API_KEY |
|
|
|
class ConversationalAI:
    """Streaming chat assistant backed by the Groq chat-completions API,
    exposed through a Gradio ChatInterface."""

    def __init__(self):
        # Groq() picks the key up from the environment; export it so the
        # client (and any other code in this process) can read it.
        os.environ["GROQ_API_KEY"] = GROQ_API_KEY
        self.client = Groq()
        # Prepended to every request so the model keeps its persona
        # across turns.
        self.system_prompt = {
            "role": "system",
            "content": "You are a useful assistant. You reply with efficient answers. "
        }

    async def chat_groq(self, message, history):
        """Stream a reply to *message*, yielding the cumulative response text
        after each received chunk (Gradio renders each yield as the current
        partial answer).

        NOTE(review): the Groq SDK call below is synchronous/blocking inside
        an ``async def`` — it will block the event loop while the stream is
        consumed. Consider the SDK's async client if that matters here.
        """
        messages = [self.system_prompt]

        # Rebuild the full conversation for the stateless API.
        # Assumes *history* is a list of (user, assistant) pairs — Gradio's
        # "tuples" chatbot format; TODO confirm against the gradio version
        # in use (newer versions default to a messages/dict format).
        for user_turn, assistant_turn in history:
            messages.append({"role": "user", "content": str(user_turn)})
            messages.append({"role": "assistant", "content": str(assistant_turn)})

        messages.append({"role": "user", "content": str(message)})

        response_content = ''

        stream = self.client.chat.completions.create(
            model="llama3-70b-8192",
            messages=messages,
            max_tokens=1024,
            temperature=1.3,
            stream=True
        )

        for chunk in stream:
            # Extract the delta once and reuse it (the original re-read the
            # chunk.choices[0].delta.content attribute chain a second time).
            delta = chunk.choices[0].delta.content
            if delta:  # final/keep-alive chunks carry content=None
                response_content += delta
                yield response_content

    def create_chat_interface(self):
        """Build and return the Gradio Blocks app wrapping ``chat_groq``.

        NOTE(review): clear_btn/undo_btn/retry_btn are legacy ChatInterface
        kwargs — verify they exist in the installed gradio version.
        """
        with gr.Blocks(theme=gr.themes.Monochrome(), fill_height=True) as demo:
            gr.ChatInterface(self.chat_groq,
                             clear_btn=None,
                             undo_btn=None,
                             retry_btn=None,
                             )
        return demo
|
|
|
if __name__ == "__main__":
    # Build the app and serve it; queue() enables streaming/generator
    # responses in Gradio.
    assistant = ConversationalAI()
    app = assistant.create_chat_interface()
    app.queue()
    app.launch()