Update app.py
app.py CHANGED
@@ -182,7 +182,7 @@ cache_pipeline = {
     # raise ValueError(f"Unknown base_model {base_model}")
 
 
-@spaces.GPU(duration=
+@spaces.GPU(duration=60)
 def infer(
     base_model,
     variant,
@@ -192,6 +192,7 @@ def infer(
     width=256,
     seed=0,
     randomize_seed=True,
+    progress = gr.Progress(track_tqdm=True),
 ):
     # if pipe_dict[base_model][variant] is None:
     #     if base_model == "ModelScope T2V":
@@ -255,7 +256,6 @@ def infer(
 
     generator = torch.Generator("cpu").manual_seed(seed)
 
-    progress = gr.Progress(track_tqdm=True)
     output = cache_pipeline["pipeline"](
         prompt=prompt,
         num_frames=16,
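What the commit does: the `gr.Progress(track_tqdm=True)` tracker moves out of the body of `infer` and into its parameter list, which is where Gradio looks for it in order to inject the tracker and mirror tqdm progress bars into the UI, and the `@spaces.GPU` decorator now requests a 60-second ZeroGPU allocation per call. Below is a minimal, self-contained sketch of that pattern; the body is only a placeholder standing in for the cached pipeline call in app.py.

# Minimal sketch of the pattern applied in this commit (not the full app.py).
import gradio as gr
import spaces
import torch


@spaces.GPU(duration=60)  # reserve ZeroGPU hardware for up to 60 s per call
def infer(
    prompt,
    seed=0,
    progress=gr.Progress(track_tqdm=True),  # injected by Gradio; callers never pass it
):
    generator = torch.Generator("cpu").manual_seed(seed)
    # In app.py this is where cache_pipeline["pipeline"](...) runs; any tqdm
    # loop inside that call is surfaced in the UI through `progress`.
    return seed

Gradio only wires up progress tracking when `gr.Progress` appears as a default value in the handler's signature, which is presumably why the line was moved out of the function body.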