piperod committed on
Commit
02cdb95
1 Parent(s): ff9df39

adding predictions

Browse files
Files changed (1) hide show
  1. app.py +21 -7
app.py CHANGED
@@ -26,11 +26,12 @@ import dotenv
26
  dotenv.load_dotenv()
27
  import numpy as np
28
  import gradio as gr
29
-
30
  from inference import inference_frame
31
  import os
32
  import pathlib
33
 
 
34
  def analize_video(x):
35
  print(x)
36
  path = '/tmp/test/'
@@ -63,6 +64,9 @@ def analize_video(x):
63
  def set_example_image(example: list) -> dict:
64
  return gr.Video.update(value=example[0])
65
 
 
 
 
66
 
67
  with gr.Blocks(title='Shark Patrol',theme=gr.themes.Soft(),live=True,) as demo:
68
  gr.Markdown("Initial DEMO.")
@@ -75,20 +79,30 @@ with gr.Blocks(title='Shark Patrol',theme=gr.themes.Soft(),live=True,) as demo:
75
 
76
  video_button = gr.Button("Analyze")
77
  with gr.Row():
78
- paths = sorted(pathlib.Path('videos_example').rglob('*.mp4'))
79
  example_images = gr.Dataset(components=[video_input],
80
  samples=[[path.as_posix()]
81
- for path in paths])
82
-
83
-
84
- with gr.Accordion("Open for More!"):
85
- gr.Markdown("Place holder for detection")
86
 
87
  video_button.click(analize_video, inputs=video_input, outputs=video_output)
88
 
89
  example_images.click(fn=set_example_image,
90
  inputs=example_images,
91
  outputs=video_input)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
92
 
93
  demo.queue()
94
  #if os.getenv('SYSTEM') == 'spaces':
 
26
  dotenv.load_dotenv()
27
  import numpy as np
28
  import gradio as gr
29
+ import glob
30
  from inference import inference_frame
31
  import os
32
  import pathlib
33
 
34
+
35
  def analize_video(x):
36
  print(x)
37
  path = '/tmp/test/'
 
64
  def set_example_image(example: list) -> dict:
65
  return gr.Video.update(value=example[0])
66
 
67
+ def show_video(example: list) -> dict:
68
+ return gr.Video.update(value=example[0])
69
+
70
 
71
  with gr.Blocks(title='Shark Patrol',theme=gr.themes.Soft(),live=True,) as demo:
72
  gr.Markdown("Initial DEMO.")
 
79
 
80
  video_button = gr.Button("Analyze")
81
  with gr.Row():
82
+ paths = sorted(pathlib.Path('videos_example/').rglob('*.mp4'))
83
  example_images = gr.Dataset(components=[video_input],
84
  samples=[[path.as_posix()]
85
+ for path in paths if 'videos_side_by_side' not in str(path)])
 
 
 
 
86
 
87
  video_button.click(analize_video, inputs=video_input, outputs=video_output)
88
 
89
  example_images.click(fn=set_example_image,
90
  inputs=example_images,
91
  outputs=video_input)
92
+
93
+ with gr.Accordion("Current Detections"):
94
+
95
+ with gr.Row():
96
+ video_example = gr.Video(source='upload',include_audio=False,stream=True)
97
+ with gr.Row():
98
+ paths = sorted(pathlib.Path('videos_example/').rglob('*webm'))
99
+ example_preds = gr.Dataset(components=[video_example],
100
+ samples=[[path.as_posix()]
101
+ for path in paths])
102
+ example_preds.click(fn=show_video,
103
+ inputs=example_preds,
104
+ outputs=video_example)
105
+
106
 
107
  demo.queue()
108
  #if os.getenv('SYSTEM') == 'spaces':