hank1996 committed on
Commit
27bbd71
·
1 Parent(s): e0b6202

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -9
app.py CHANGED
@@ -78,14 +78,14 @@ def detect(img,model):
78
  nms_time = AverageMeter()
79
 
80
  # Load model
81
- model = attempt_load(weights, map_location=device) # load FP32 model
82
- stride = int(model.stride.max()) # model stride
83
- imgsz = check_img_size(imgsz, s=stride) # check img_size
84
-
85
- #stride =32
86
- #model = torch.jit.load(weights)
87
- #print(model)
88
-
89
  #model = model.to(device)
90
  #print(111111111)
91
 
@@ -192,4 +192,4 @@ def detect(img,model):
192
  return Image.fromarray(im0[:,:,::-1])
193
 
194
 
195
- gr.Interface(detect,[gr.Image(type="pil"),gr.Dropdown(choices=["yolov7-e6e","yolopv2"])], gr.Image(type="pil"),title="Yolopv2",examples=[["horses.jpeg", "yolov7-e6e"]],description="demo for <a href='https://github.com/CAIC-AD/YOLOPv2' style='text-decoration: underline' target='_blank'>WongKinYiu/yolov7</a> Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors").launch()
 
78
  nms_time = AverageMeter()
79
 
80
  # Load model
81
+ #model = attempt_load(weights, map_location=device) # load FP32 model
82
+ #stride = int(model.stride.max()) # model stride
83
+ #imgsz = check_img_size(imgsz, s=stride) # check img_size
84
+ print(weights)
85
+ stride =32
86
+ model = torch.jit.load(weights)
87
+ print(model)
88
+ imgsz = check_img_size(imgsz, s=stride)
89
  #model = model.to(device)
90
  #print(111111111)
91
 
 
192
  return Image.fromarray(im0[:,:,::-1])
193
 
194
 
195
+ gr.Interface(detect,[gr.Image(type="pil"),gr.Dropdown(choices=["yolov7-e6e","yolopv2"])], gr.Image(type="pil"),title="Yolopv2",examples=[["horses.jpeg", "yolopv2"]],description="demo for <a href='https://github.com/CAIC-AD/YOLOPv2' style='text-decoration: underline' target='_blank'>WongKinYiu/yolov7</a> Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors").launch()