Update app.py
Browse files
app.py
CHANGED
@@ -120,6 +120,6 @@ def bot_streaming(message, history):

 120 
 121 demo = gr.ChatInterface(fn=bot_streaming, title="LLaVA NeXT", examples=[{"text": "What is on the flower?", "files":["./bee.jpg"]},
 122                         {"text": "How to make this pastry?", "files":["./baklava.png"]}],
-123                         description="Try [nanoLLaVA](https://huggingface.co/qnguyen3/nanoLLaVA) in this demo. Built on top of [Quyen-SE-v0.1 (Qwen1.5-0.5B)](https://huggingface.co/…  [line truncated in original capture]
+123                         description="Try [nanoLLaVA](https://huggingface.co/qnguyen3/nanoLLaVA) in this demo. Built on top of [Quyen-SE-v0.1](https://huggingface.co/vilm/Quyen-SE-v0.1) (Qwen1.5-0.5B) and [Google SigLIP-400M](https://huggingface.co/google/siglip-so400m-patch14-384). Upload an image and start chatting about it, or simply try one of the examples below. If you don't upload an image, you will receive an error.",
 124                         stop_btn="Stop Generation", multimodal=True)
 125 demo.launch(debug=True)