Commit a72d2fc by Ubuntu · Parent: fd2e5e6

minor change
app.py CHANGED
@@ -253,11 +253,11 @@ with gr.Blocks() as MiniAIdemo:
         im_match_in1 = gr.Image(type='filepath', height=300)
         gr.Examples(
             [
-
-
-
-
-
+                "images/compare/demo-pic22.jpg",
+                "images/compare/demo-pic60.jpg",
+                "images/compare/demo-pic35.jpg",
+                "images/compare/demo-pic33.jpg",
+                "images/compare/demo-pic34.jpg",
             ],
             inputs=im_match_in1
         )
@@ -265,11 +265,11 @@ with gr.Blocks() as MiniAIdemo:
         im_match_in2 = gr.Image(type='filepath', height=300)
         gr.Examples(
             [
-
-
-
-
-
+                "images/compare/demo-pic41.jpg",
+                "images/compare/demo-pic32.jpg",
+                "images/compare/demo-pic39.jpg",
+                "images/compare/demo-pic61.jpg",
+                "images/compare/demo-pic40.jpg",
             ],
             inputs=im_match_in2
         )
@@ -284,14 +284,14 @@ with gr.Blocks() as MiniAIdemo:
         im_liveness_in = gr.Image(type='filepath', height=300)
         gr.Examples(
             [
-
-
-
-
-
-
-
-
+                "images/liveness/f_real_andr.jpg",
+                "images/liveness/f_fake_andr_mask3d.jpg",
+                "images/liveness/f_fake_andr_monitor.jpg",
+                "images/liveness/f_fake_andr_outline.jpg",
+                "images/liveness/f_fake_andr_outline3d.jpg",
+                "images/liveness/1.jpg",
+                "images/liveness/3.png",
+                "images/liveness/4.jpg",
             ],
             inputs=im_liveness_in
         )
@@ -309,12 +309,12 @@ with gr.Blocks() as MiniAIdemo:
         im_emotion_in = gr.Image(type='filepath', height=300)
        gr.Examples(
             [
-
-
-
-
-
-
+                "images/emotion/1.jpg",
+                "images/emotion/2.jpg",
+                "images/emotion/3.jpg",
+                "images/emotion/4.jpg",
+                "images/emotion/5.jpg",
+                "images/emotion/6.jpg",
             ],
             inputs=im_emotion_in
         )
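For context, all four hunks follow the same Gradio pattern: an image input created with gr.Image(type='filepath') paired with a gr.Examples gallery of clickable sample paths. Below is a minimal, self-contained sketch of that pattern; the component names im_match_in1/im_match_in2 and the example paths come from the diff, while compare_faces, the button wiring, and the demo variable are illustrative assumptions, not code from app.py.

import gradio as gr

def compare_faces(path1, path2):
    # Hypothetical stand-in for the app's real face-matching logic.
    # type='filepath' means each argument arrives as a file path string,
    # not a decoded image array.
    return f"compare({path1}, {path2})"

with gr.Blocks() as demo:
    with gr.Row():
        im_match_in1 = gr.Image(type='filepath', height=300)
        im_match_in2 = gr.Image(type='filepath', height=300)

    # gr.Examples renders clickable thumbnails; selecting one loads the
    # listed file into the bound input component. The paths must exist
    # on disk relative to where the app is launched.
    gr.Examples(
        [
            "images/compare/demo-pic22.jpg",
            "images/compare/demo-pic60.jpg",
        ],
        inputs=im_match_in1,
    )
    gr.Examples(
        [
            "images/compare/demo-pic41.jpg",
            "images/compare/demo-pic32.jpg",
        ],
        inputs=im_match_in2,
    )

    out = gr.Textbox(label="Result")
    gr.Button("Compare").click(
        compare_faces,
        inputs=[im_match_in1, im_match_in2],
        outputs=out,
    )

demo.launch()

The same structure applies to the liveness and emotion tabs: one gr.Examples list per input component, with the example list bound via the inputs= argument.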