import gradio as gr
from gradio_client import Client, handle_file
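
# Remote Hugging Face Spaces used as backends: moondream2 for image
# descriptions and a hosted Llama 3.3 70B Instruct Space for chat.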
moondream_client = Client("vikhyatk/moondream2")
llama_client = Client("goingyt/meta-llama-Llama-3.3-70B-Instruct")
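
# Conversation history shared across calls, kept as (user, assistant) pairs,
# the tuple format Gradio chat endpoints typically expect.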
history = []


def describe_image(image, user_message):
    global history
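
    # Ask moondream2 to describe the uploaded image.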
    result = moondream_client.predict(
        img=handle_file(image),
        prompt="Describe this image.",
        api_name="/answer_question"
    )
    description = result
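
    # Record this turn so the description is available as chat context.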
    history.append((user_message, description))
    llama_result = llama_client.predict(
        message=user_message,
        history=history,
        api_name="/chat"
    )
    return description + "\n\nAssistant: " + llama_result


def chat_or_image(image, user_message):
    global history
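
    # Route image uploads through the captioning path; otherwise chat
    # directly with Llama and record the new (user, assistant) pair.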
    if image:
        return describe_image(image, user_message)
    else:
        llama_result = llama_client.predict(
            message=user_message,
            history=history,
            api_name="/chat"
        )
        history.append((user_message, llama_result))
        return llama_result
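

# A single Gradio interface: an optional image upload plus a free-form text box.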
demo = gr.Interface(
    fn=chat_or_image,
    inputs=[
        gr.Image(type="filepath", label="Upload an image (optional)"),
        gr.Textbox(label="Ask a question or chat", placeholder="Ask a question...", lines=2)
    ],
    outputs="text",
)

if __name__ == "__main__":
    demo.launch()