freealise committed (verified)
Commit 30d0a53 · 1 Parent(s): 71a72e5

Update app.py

Files changed (1): app.py (+8 -66)
app.py CHANGED
@@ -94,7 +94,7 @@ def create_video(frames, fps, type):
     return type + "_result.mp4"
 
 
-def infer(url_in,interpolation,fps_output,resize_n,winsize,o_flow):
+def infer(url_in,interpolation,fps_output,resize_n):
 
     fps_output = logscale(fps_output)
     # 1. break video into frames and get FPS
@@ -131,64 +131,10 @@ def infer(url_in,interpolation,fps_output,resize_n,winsize,o_flow):
     final_vid = create_video(result_frames, fps_output, "interpolated")
 
     files = final_vid
-
-    depth_map = cv2.VideoCapture(final_vid)
     print("interpolated frames: " + str(len(frames_list)) + " -> " + str(len(result_frames)))
-
-    depth_frames = []
-    ret, fr1 = depth_map.read()
-    prvs = cv2.cvtColor(fr1, cv2.COLOR_RGBA2GRAY)
-    hsv = np.zeros_like(fr1)
-    hsv[..., 1] = 255
-    res = np.zeros_like(prvs)
-    flow = res
-
-    i=0
-    while(depth_map.isOpened()):
-        ret, fr2 = depth_map.read()
-        if ret == False:
-            if o_flow == False:
-                rgb = prvs
-                alpha = 1.0/(i+1)
-                beta = (1.0 - alpha)
-                res = cv2.addWeighted(rgb, alpha, res, beta, 0.0, res)
-            break
-        nxt = cv2.cvtColor(fr2, cv2.COLOR_RGBA2GRAY)
-        if o_flow == True:
-            fl = cv2.calcOpticalFlowFarneback(prvs, nxt, flow, 0.5, 7, winsize, 7, 7, 1.5, 0)
-            mag, ang = cv2.cartToPolar(fl[..., 0], fl[..., 1])
-            hsv[..., 0] = ang*180/np.pi/2
-            hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
-            rgb = cv2.cvtColor(hsv, cv2.COLOR_HSV2RGB)
-            rgb = cv2.cvtColor(rgb, cv2.COLOR_RGBA2GRAY)
-        else:
-            rgb = prvs
-
-        alpha = 1.0/(i+1)
-        beta = (1.0 - alpha)
-        res = cv2.addWeighted(rgb, alpha, res, beta, 0.0, res)
-        rgb = cv2.cvtColor(rgb, cv2.COLOR_GRAY2RGB)
-
-        lo = np.array([0,0,0])
-        hi = np.array([16,16,16])
-        mask = cv2.inRange(rgb, lo, hi)
-        #m = cv2.bitwise_and(mask_f, mask)
-        #diff = np.abs(rgba - bg)
-        fr2[mask>0] = (255,255,255)
-
-        cv2.imwrite(f"opticalfb{i}.png", fr2)
-        depth_frames.append(f"opticalfb{i}.png")
-
-        i+=1
-        prvs = nxt
-
-    print("averaged frames: " + str(len(result_frames)))
-    depth_vid = create_video(depth_frames, fps_output, "depth_map")
-    cv2.imwrite('opticalfb.png', res)
-    depth_map.release()
     cv2.destroyAllWindows()
 
-    return final_vid, files, depth_vid, 'opticalfb.png'
+    return final_vid, files
 
 
 def logscale(linear):
@@ -233,11 +179,9 @@ with gr.Blocks() as demo:
             video_input = gr.Video()
             video_input.change(fn=loadurl, inputs=[video_input], outputs=[url_input])
             resize_num = gr.Slider(minimum=1, maximum=4096, step=1, value=256, label="Resize to width: ")
-            of_check = gr.Checkbox(value=True, label="Detect motion to remove background: ")
-            winsize_num = gr.Slider(minimum=1, maximum=256, step=1, value=15, label="Motion detection window size: ")
             with gr.Row():
-                interpolation_slider = gr.Slider(minimum=1, maximum=5, step=1, value=1, label="Interpolation Steps: ")
-                interpolation = gr.Number(value=2, show_label=False, interactive=False)
+                interpolation_slider = gr.Slider(minimum=1, maximum=5, step=1, value=0, label="Interpolation Steps: ")
+                interpolation = gr.Number(value=1, show_label=False, interactive=False)
             interpolation_slider.change(fn=logscale, inputs=[interpolation_slider], outputs=[interpolation])
             with gr.Row():
                 fps_output_slider = gr.Slider(minimum=0, maximum=5, step=1, value=0, label="FPS output: ")
@@ -248,17 +192,15 @@ with gr.Blocks() as demo:
         with gr.Column():
             video_output = gr.Video()
             file_output = gr.File()
-            depth_output = gr.Video()
-            depth_avg = gr.Image()
 
     gr.Examples(
-        examples=[["./examples/man-in-museum-reverse-cut.mp4", 2, 0, 256, 15, True]],
+        examples=[["./examples/man-in-museum-reverse-cut.mp4", 0, 0, 256]],
         fn=infer,
-        inputs=[url_input, interpolation_slider, fps_output_slider, resize_num, winsize_num, of_check],
-        outputs=[video_output, file_output, depth_output, depth_avg],
+        inputs=[url_input, interpolation_slider, fps_output_slider, resize_num],
+        outputs=[video_output, file_output],
         cache_examples=True
     )
 
-    submit_btn.click(fn=infer, inputs=[url_input, interpolation_slider, fps_output_slider, resize_num, winsize_num, of_check], outputs=[video_output, file_output, depth_output, depth_avg])
+    submit_btn.click(fn=infer, inputs=[url_input, interpolation_slider, fps_output_slider, resize_num], outputs=[video_output, file_output])
 
 demo.launch()
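For reference, the pass this commit deletes can be summarised as a standalone function. The following is a minimal sketch reconstructed from the removed lines above, not the file as it was committed: the name `remove_static_background` is made up, the RGBA-to-GRAY conversions are swapped for BGR-to-GRAY (cv2.VideoCapture yields 3-channel BGR frames), and the Farneback parameters and the 0-16 "no motion" threshold are copied from the deleted code.

```python
import cv2
import numpy as np

def remove_static_background(video_path, winsize=15, out_prefix="opticalfb"):
    """Flag low-motion pixels via Farneback optical flow, paint them white,
    and keep a running average of the per-frame motion images."""
    cap = cv2.VideoCapture(video_path)
    ret, frame = cap.read()
    if not ret:
        cap.release()
        return []

    prvs = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    hsv = np.zeros_like(frame)
    hsv[..., 1] = 255                 # full saturation for the flow visualisation
    avg = np.zeros_like(prvs)         # running average of the motion images
    out_frames = []

    i = 0
    while True:
        ret, frame = cap.read()
        if not ret:
            break
        nxt = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

        # Dense optical flow; parameter values copied from the removed code.
        flow = cv2.calcOpticalFlowFarneback(prvs, nxt, None, 0.5, 7, winsize, 7, 7, 1.5, 0)
        mag, ang = cv2.cartToPolar(flow[..., 0], flow[..., 1])
        hsv[..., 0] = ang * 180 / np.pi / 2
        hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
        motion = cv2.cvtColor(cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR), cv2.COLOR_BGR2GRAY)

        # Running mean of the motion images (weight 1/(i+1) for the newest frame).
        alpha = 1.0 / (i + 1)
        avg = cv2.addWeighted(motion, alpha, avg, 1.0 - alpha, 0.0)

        # Pixels with (almost) no motion count as background and are whited out.
        mask = cv2.inRange(cv2.cvtColor(motion, cv2.COLOR_GRAY2BGR),
                           np.array([0, 0, 0]), np.array([16, 16, 16]))
        frame[mask > 0] = (255, 255, 255)

        name = f"{out_prefix}{i}.png"
        cv2.imwrite(name, frame)
        out_frames.append(name)
        i += 1
        prvs = nxt

    cap.release()
    cv2.imwrite(f"{out_prefix}.png", avg)   # averaged motion image, previously returned as 'opticalfb.png'
    return out_frames
```

In the deleted version these masked frames were fed back through create_video() as the "depth_map" output and the averaged image was returned as a fourth result; both outputs disappear with this commit.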
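One detail the diff leaves implicit: both sliders are wired through logscale, whose body is not part of the changed lines. A guess consistent with the paired defaults in this commit (slider 0 shown next to a Number of 1, and previously slider 1 next to 2) is a power-of-two mapping; treat this as an assumption, not the actual definition in app.py.

```python
# Assumed definition of logscale, inferred only from the slider/Number defaults
# visible in this diff; the real body is not shown in the changed lines.
def logscale(linear):
    return 2 ** linear   # e.g. slider steps 0..5 -> 1, 2, 4, 8, 16, 32
```

Within the diff itself, only fps_output is shown being passed through logscale inside infer; the interpolation slider's mapped value is merely displayed in the read-only Number next to it.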