Build error
Update app.py
app.py CHANGED
@@ -195,7 +195,7 @@ def detect(img,model):
 
     vid_path, vid_writer = None, None
     img = torch.zeros((1, 3, imgsz, imgsz), device=device)  # init img
-    _ = model(img.half() if half else img)
+    _ = model(img.half() if half else img)  # run once
     model.eval()
 
 
@@ -273,4 +273,4 @@ def detect(img,model):
     return Image.fromarray(im0[:,:,::-1])
 
 
-gr.Interface(detect,[gr.Image(type="pil"),gr.Dropdown(choices=["yolopv2","yolop"])], gr.Image(type="pil"),title="Yolopv2",examples=[["example.jpeg", "yolopv2"]],description="demo for <a href='https://github.com/CAIC-AD/YOLOPv2' style='text-decoration: underline' target='_blank'>yolopv2</a> π: Better, Faster, Stronger for Panoptic driving Perception
+gr.Interface(detect,[gr.Image(type="pil"),gr.Dropdown(choices=["yolopv2","yolop"])], gr.Image(type="pil"),title="Yolopv2",examples=[["example.jpeg", "yolopv2"]],description="demo for <a href='https://github.com/CAIC-AD/YOLOPv2' style='text-decoration: underline' target='_blank'>yolopv2</a> π: Better, Faster, Stronger for Panoptic driving Perception ").launch()
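The "# run once" comment added in the first hunk refers to the usual warm-up idiom: push one dummy tensor through the network before serving real requests so the weights sit on the target device and any lazy initialization happens up front. A minimal, self-contained sketch of that pattern, using a stand-in torch.nn.Conv2d rather than the Space's YOLOPv2/YOLOP model (device, half, and imgsz here are assumptions, mirroring the names in the diff):

import torch

# Stand-in network; app.py loads YOLOPv2/YOLOP weights here instead.
model = torch.nn.Conv2d(3, 8, kernel_size=3, padding=1)

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
half = device.type != "cpu"   # half precision only pays off on GPU
imgsz = 640                   # assumed input size

model.to(device)
if half:
    model.half()

img = torch.zeros((1, 3, imgsz, imgsz), device=device)  # init img
_ = model(img.half() if half else img)  # run once: warm-up forward pass
model.eval()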
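The second hunk is the actual build fix: the removed line left the description string unterminated and never called .launch(), so app.py would fail to parse and the Space could not start. Below is a hedged sketch of the corrected call, spread over several lines for readability; only the arguments visible in the diff come from the source, while the wrapping, the string splitting, and the placeholder detect stub are assumptions added so the sketch runs on its own.

import gradio as gr


def detect(img, model):
    # Placeholder stub so this sketch is self-contained; the real detect()
    # in app.py runs YOLOPv2/YOLOP inference and returns the annotated frame.
    return img


gr.Interface(
    detect,
    [gr.Image(type="pil"), gr.Dropdown(choices=["yolopv2", "yolop"])],
    gr.Image(type="pil"),
    title="Yolopv2",
    examples=[["example.jpeg", "yolopv2"]],  # example.jpeg ships with the Space
    description=(
        "demo for <a href='https://github.com/CAIC-AD/YOLOPv2' "
        "style='text-decoration: underline' target='_blank'>yolopv2</a> "
        "π: Better, Faster, Stronger for Panoptic driving Perception "
    ),  # the closing quote here is what the removed line was missing
).launch()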