import gradio as gr
import spaces
from PIL import Image
from models.mllava import MLlavaProcessor, LlavaForConditionalGeneration, chat_mllava, MLlavaForConditionalGeneration
from typing import List
processor = MLlavaProcessor()
model = LlavaForConditionalGeneration.from_pretrained("MFuyu/mllava_v2_4096")

@spaces.GPU
def generate(text: str, images: List[Image.Image], history: List[dict]):
    # `model` is defined at module level; without a global declaration the
    # assignment below would make it a local and raise UnboundLocalError.
    global model
    model = model.to("cuda")

    # chat_mllava streams partial responses together with the updated history;
    # name the streamed chunk `response` to avoid shadowing the `text` argument.
    for response, history in chat_mllava(text, images, model, processor, history=history, stream=True):
        yield response, history

def build_demo():
    # Minimal chat UI sketch. It assumes chat_mllava's history is a list of
    # {"role": ..., "text": ...} dicts and converts it into (user, assistant)
    # pairs for the Chatbot component.
    def respond(message, image, history):
        images = [image] if image is not None else []
        for _, history in generate(message, images, history):
            pairs = [
                (history[i]["text"], history[i + 1]["text"] if i + 1 < len(history) else None)
                for i in range(0, len(history), 2)
            ]
            yield pairs, history

    with gr.Blocks() as demo:
        chatbot = gr.Chatbot(label="MLlava")
        image_input = gr.Image(type="pil", label="Image")
        text_input = gr.Textbox(label="Message", placeholder="Ask about the image")
        history_state = gr.State([])
        text_input.submit(
            respond,
            [text_input, image_input, history_state],
            [chatbot, history_state],
        )
    return demo

if __name__ == "__main__":
    # processor and model are already loaded at module level for the
    # @spaces.GPU-decorated generate(), so they are not re-loaded here.
    demo = build_demo()
    demo.launch()
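
# Usage sketch (assumptions: this file is the Space's app.py and a CUDA GPU
# or ZeroGPU allocation is available):
#   $ python app.py
# Then open the printed local URL, upload an image, and type a question;
# responses stream into the chat panel as chat_mllava generates them.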