import gradio as gr
import torch
from diffusers import DiffusionPipeline, DPMSolverMultistepScheduler
from diffusers.utils import export_to_video
device = "cuda" if torch.cuda.is_available() else "cpu"
#model = gr.Interface.load("models/camenduru/text2_video_zero")
# load pipeline
pipe = DiffusionPipeline.from_pretrained("camenduru/text2-video-zero").to(device)
pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)
#if device == "cuda":
#    # optimize for GPU memory
#    pipe.enable_model_cpu_offload()
#else:
#    pass
#pipe.enable_vae_slicing()
def ttv():
    # generate frames with the text-to-video pipeline
    prompt = "Spiderman is surfing. Darth Vader is also surfing and following Spiderman"
    # assumes the pipeline output exposes the generated frames as .frames
    video_frames = pipe(prompt, num_inference_steps=25, num_frames=20).frames
    # convert to video
    #video_path = export_to_video(video_frames)
    return video_frames

with gr.Blocks() as app:
    inp = gr.Textbox()
    btn = gr.Button()
    outp = gr.Gallery()
    btn.click(ttv, None, outp)

app.launch()
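
# Alternative output path (a hedged sketch, not part of the original Space): the
# commented-out export_to_video call above suggests returning an .mp4 rather than
# raw frames. Assuming the pipeline output exposes the generated frames as
# .frames and that diffusers.utils.export_to_video returns the path of the file
# it writes, the handler could take its prompt from the Textbox and feed a
# gr.Video component instead of the Gallery:
#
# def ttv_video(prompt):
#     # run the same pipeline, then write the frames to a temporary .mp4
#     video_frames = pipe(prompt, num_inference_steps=25, num_frames=20).frames
#     return export_to_video(video_frames)
#
# with gr.Blocks() as app:
#     inp = gr.Textbox()
#     btn = gr.Button()
#     outp = gr.Video()
#     btn.click(ttv_video, inp, outp)
#
# app.launch()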