import streamlit as st
from diffusers import DiffusionPipeline
import torch
from moviepy.editor import ImageClip, concatenate_videoclips
import numpy as np
import tempfile, os

st.title("🎬 Text-to-Video (Zeroscope)")

# Cache the pipeline so Streamlit does not reload the model on every rerun
@st.cache_resource
def load_model():
    pipe = DiffusionPipeline.from_pretrained(
        "cerspense/zeroscope_v2_576w",
        torch_dtype=torch.float32  # full precision for CPU inference
    )
    pipe.to("cpu")
    return pipe

pipe = load_model()

prompt = st.text_area("Enter prompt (short & descriptive):", max_chars=50)

if st.button("Generate Video"):
    if prompt:
        with st.spinner("Generating... (may take a few mins on CPU)"):
            # Generate a short, low-resolution clip (CPU-friendly settings);
            # on newer diffusers versions .frames is batched per prompt,
            # in which case .frames[0] holds the frame list
            video_frames = pipe(prompt, num_frames=8, height=320, width=576).frames
            video_filename = tempfile.mktemp(suffix=".mp4")
            # Turn each frame into a 0.3 s ImageClip and stitch them into one video
            clips = [ImageClip(np.array(frame)).set_duration(0.3) for frame in video_frames]
            final_clip = concatenate_videoclips(clips, method="compose")