Spaces: Running on A100

tokenid committed
Commit 397413a
1 Parent(s): 74b23d0

update

Files changed:
- app.py +7 -15
- examples/x_cube.jpg +0 -0
app.py
CHANGED
@@ -123,7 +123,6 @@ def preprocess(input_image, do_remove_background):
     return input_image
 
 
-@spaces.GPU
 def generate_mvs(input_image, sample_steps, sample_seed):
 
     seed_everything(sample_seed)
@@ -141,8 +140,6 @@ def generate_mvs(input_image, sample_steps, sample_seed):
 
     return z123_image, show_image
 
-
-@spaces.GPU
 def make_mesh(mesh_fpath, planes):
 
     mesh_basename = os.path.basename(mesh_fpath).split('.')[0]
@@ -168,9 +165,10 @@ def make_mesh(mesh_fpath, planes):
 
     return mesh_fpath
 
-
 @spaces.GPU
-def make3d(images):
+def make3d(input_image, sample_steps, sample_seed):
+
+    images, show_images = generate_mvs(input_image, sample_steps, sample_seed)
 
     images = np.asarray(images, dtype=np.float32) / 255.0
     images = torch.from_numpy(images).permute(2, 0, 1).contiguous().float()   # (3, 960, 640)
@@ -223,7 +221,7 @@ def make3d(images):
 
     mesh_fpath = make_mesh(mesh_fpath, planes)
 
-    return video_fpath, mesh_fpath
+    return video_fpath, mesh_fpath, show_images
 
 
 _HEADER_ = '''
@@ -298,7 +296,7 @@ with gr.Blocks() as demo:
                     ],
                     inputs=[input_image],
                     label="Examples",
-                    examples_per_page=
+                    examples_per_page=15
                 )
 
         with gr.Column():
@@ -330,20 +328,14 @@ with gr.Blocks() as demo:
     gr.Markdown(_LINKS_)
     gr.Markdown(_CITE_)
 
-    mv_images = gr.State()
-
     submit.click(fn=check_input_image, inputs=[input_image]).success(
         fn=preprocess,
         inputs=[input_image, do_remove_background],
         outputs=[processed_image],
-    ).success(
-        fn=generate_mvs,
-        inputs=[processed_image, sample_steps, sample_seed],
-        outputs=[mv_images, mv_show_images],
     ).success(
         fn=make3d,
-        inputs=[mv_images],
+        inputs=[processed_image, sample_steps, sample_seed],
+        outputs=[output_video, output_model_obj, mv_show_images]
     )
 
 demo.launch()
examples/x_cube.jpg
DELETED
Binary file (58.9 kB)
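
Net effect of the app.py changes, for readers skimming the diff: the @spaces.GPU decorator is removed from generate_mvs and make_mesh, and make3d becomes the single GPU entry point. It now takes (input_image, sample_steps, sample_seed), calls generate_mvs itself, and returns the multi-view preview as a third output, so the mv_images gr.State and the separate generate_mvs stage in the click chain are no longer needed. Below is a minimal, self-contained sketch of that wiring only; the stub functions and component settings are hypothetical stand-ins, not the Space's real pipeline code.

# Sketch of the post-commit wiring: check_input_image -> preprocess -> make3d,
# with make3d as the only GPU-decorated function. Stubs replace the real
# multi-view generation and mesh reconstruction.

import gradio as gr

try:
    import spaces                     # provides the GPU decorator on HF Spaces
    gpu_decorator = spaces.GPU
except ImportError:
    def gpu_decorator(fn):            # no-op fallback for local runs
        return fn

def check_input_image(image):
    if image is None:
        raise gr.Error("No image uploaded!")

def preprocess(image, do_remove_background):
    return image                      # stub: background removal elided

def generate_mvs(image, steps, seed):
    return image, image               # stub: multi-view grid and its preview

@gpu_decorator
def make3d(image, steps, seed):
    # multi-view generation now runs inside the single GPU-decorated call
    images, show_images = generate_mvs(image, steps, seed)
    video_fpath, mesh_fpath = None, None   # stub: reconstruction elided
    # the preview is returned directly instead of going through a gr.State
    return video_fpath, mesh_fpath, show_images

with gr.Blocks() as demo:
    input_image = gr.Image(type="pil")
    do_remove_background = gr.Checkbox(value=True, label="Remove background")
    sample_steps = gr.Slider(30, 100, value=75, label="Sample steps")
    sample_seed = gr.Number(value=42, label="Seed")
    processed_image = gr.Image(type="pil")
    mv_show_images = gr.Image(type="pil")
    output_video = gr.Video()
    output_model_obj = gr.Model3D()
    submit = gr.Button("Generate")

    # Two .success() stages instead of three; make3d's extra return value
    # feeds mv_show_images directly.
    submit.click(fn=check_input_image, inputs=[input_image]).success(
        fn=preprocess,
        inputs=[input_image, do_remove_background],
        outputs=[processed_image],
    ).success(
        fn=make3d,
        inputs=[processed_image, sample_steps, sample_seed],
        outputs=[output_video, output_model_obj, mv_show_images],
    )

demo.launch()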