app.py CHANGED
@@ -131,15 +131,15 @@ if not os.path.exists("models/personalized/TUSUN.safetensors"):
     os.system(f'mv models/personalized/TUSUN.safetensors?download=true models/personalized/TUSUN.safetensors')
     print("TUSUN Download!", )
 
-mv1 = os.system(f'mv /usr/local/lib/python3.10/site-packages/gradio/helpers.py /usr/local/lib/python3.10/site-packages/gradio/helpers_bkp.py')
-mv2 = os.system(f'mv helpers.py /usr/local/lib/python3.10/site-packages/gradio/helpers.py')
+# mv1 = os.system(f'mv /usr/local/lib/python3.10/site-packages/gradio/helpers.py /usr/local/lib/python3.10/site-packages/gradio/helpers_bkp.py')
+# mv2 = os.system(f'mv helpers.py /usr/local/lib/python3.10/site-packages/gradio/helpers.py')
 
 
-# Check whether the commands succeeded
-if mv1 == 0 and mv2 == 0:
-    print("file move success!")
-else:
-    print("file move failed!")
+# # Check whether the commands succeeded
+# if mv1 == 0 and mv2 == 0:
+#     print("file move success!")
+# else:
+#     print("file move failed!")
 
 
 # - - - - - examples - - - - - #
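The block disabled above used to monkey-patch the installed Gradio package: it shelled out to mv to back up site-packages/gradio/helpers.py, dropped a local patched helpers.py in its place, and checked the two os.system return codes. A minimal sketch of the same swap, assuming a patched helpers.py sitting next to app.py and the hard-coded site-packages path from the diff, using shutil instead of shelling out:

import shutil
from pathlib import Path

# Paths taken from the commented-out commands; assumed here, not verified.
patched = Path("helpers.py")
installed = Path("/usr/local/lib/python3.10/site-packages/gradio/helpers.py")
backup = installed.with_name("helpers_bkp.py")

try:
    shutil.move(installed, backup)    # keep the stock helpers.py as a backup
    shutil.move(patched, installed)   # drop in the patched copy
    print("file move success!")
except OSError as err:                # missing file, permission error, etc.
    print("file move failed!", err)

With this commit the patch is commented out entirely, so the helpers.py that ships with the installed Gradio version is used as-is.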
@@ -305,11 +305,13 @@ class ImageConductor:
         print("Run!")
         if examples_type != "":
             ### for adapting high version gradio
+            tracking_points = gr.State([])
             first_frame_path = IMAGE_PATH[examples_type]
-
+            points = json.load(open(POINTS[examples_type]))
+            tracking_points.value.extend(points)
             print("example first_frame_path", first_frame_path)
-            print("example tracking_points", tracking_points)
-
+            print("example tracking_points", tracking_points.value)
+
             original_width, original_height=384, 256
         if isinstance(tracking_points, list):
             input_all_points = tracking_points
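The "for adapting high version gradio" branch no longer relies on the incoming tracking_points argument when an example is selected: it builds a fresh gr.State([]), reads the example's saved points from the JSON file registered in POINTS, and extends the state's value before logging it. A minimal sketch of that pattern, assuming a small placeholder JSON file (the file name and point values below are made up for illustration):

import json
import gradio as gr

# Placeholder example file: a JSON list of point tracks, one [x, y] pair per step.
example_points_path = "example_points.json"
with open(example_points_path, "w") as f:
    json.dump([[[102, 98], [135, 120]], [[160, 87], [172, 101]]], f)

# Same pattern as the commit: start from an empty State and extend its value
# with the points stored for the selected example.
tracking_points = gr.State([])
points = json.load(open(example_points_path))
tracking_points.value.extend(points)

print("example tracking_points", tracking_points.value)

The later isinstance(tracking_points, list) check then distinguishes a raw list of points (normal calls) from the State object built here for the example path.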
@@ -672,4 +674,4 @@ with block:
                      negative_prompt, seed, randomize_seed, guidance_scale, num_inference_steps, personalized, examples_type],
                     [output_image, output_video])
 
-block.launch()
+block.queue().launch()
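The launch call now goes through Gradio's request queue: Blocks.queue() returns the same Blocks instance, so it chains directly into launch(), and queued events keep long-running generation requests (such as this Space's GPU inference) from tripping plain HTTP timeouts. A minimal sketch of the pattern with a placeholder handler:

import time
import gradio as gr

def slow_generate(prompt):
    time.sleep(5)                      # stand-in for a long diffusion run
    return f"done: {prompt}"

with gr.Blocks() as block:
    prompt = gr.Textbox(label="prompt")
    result = gr.Textbox(label="result")
    prompt.submit(slow_generate, [prompt], [result])

# queue() enables the event queue and returns the Blocks object, so it chains with launch().
block.queue().launch()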