Update app.py
app.py CHANGED
@@ -6,10 +6,13 @@ import time
 from PIL import Image
 import torch
 import spaces
+import os
 
-
+MODEL_ID = os.environ.get("MODEL_ID", "llava-hf/llava-v1.6-mistral-7b-hf")
+REVISION = os.environ.get("MODEL_REVISION", "main")
+processor = LlavaNextProcessor.from_pretrained(MODEL_ID, revision=REVISION)
 
-model = LlavaNextForConditionalGeneration.from_pretrained(
+model = LlavaNextForConditionalGeneration.from_pretrained(MODEL_ID, revision=REVISION, torch_dtype=torch.float16, low_cpu_mem_usage=True)
 model.to("cuda:0")
 
 @spaces.GPU
@@ -50,7 +53,7 @@ def bot_streaming(message, history):
     yield generated_text_without_prompt
 
 
-demo = gr.ChatInterface(fn=bot_streaming, title="
+demo = gr.ChatInterface(fn=bot_streaming, title="VLM Playground", examples=[{"text": "What is on the flower?", "files":["./bee.jpg"]},
 {"text": "How to make this pastry?", "files":["./baklava.png"]}],
 description="Try [LLaVA NeXT](https://huggingface.co/docs/transformers/main/en/model_doc/llava_next) in this demo (more specifically, the [Mistral-7B variant](https://huggingface.co/llava-hf/llava-v1.6-mistral-7b-hf)). Upload an image and start chatting about it, or simply try one of the examples below. If you don't upload an image, you will receive an error.",
 stop_btn="Stop Generation", multimodal=True)
|