DawnC committed on
Commit
8f083eb
1 Parent(s): 1959409

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +39 -34
app.py CHANGED
@@ -36,8 +36,6 @@ from ultralytics import YOLO
36
  from device_manager import DeviceManager, device_handler
37
  import asyncio
38
  import traceback
39
- import spaces
40
-
41
 
42
  # model_yolo = YOLO('yolov8l.pt')
43
 
@@ -622,8 +620,7 @@ def preprocess_image(image):
622
  ])
623
 
624
  return transform(image).unsqueeze(0)
625
-
626
- @device_handler
627
  async def predict_single_dog(image):
628
  """
629
  Predicts the dog breed using only the classifier.
@@ -650,41 +647,49 @@ async def predict_single_dog(image):
650
  return probabilities[0], breeds[:3], relative_probs
651
 
652
  except RuntimeError as e:
653
- if "out of memory" in str(e) or "CUDA" in str(e):
654
- logger.warning("ZeroGPU unavailable, falling back to CPU")
655
  device_mgr._current_device = torch.device('cpu')
656
- model.to('cpu')
657
- return await predict_single_dog(image) # 遞迴調用,使用 CPU
658
  raise e
659
 
660
 
661
@device_handler
async def detect_multiple_dogs(image, conf_threshold=0.3, iou_threshold=0.55):
    """Run YOLO over *image* and return every detected dog.

    Each element of the returned list is a tuple of
    (cropped PIL image, confidence, [x1, y1, x2, y2]).  When no dog is
    detected, the whole frame is returned once with confidence 1.0.
    """
    detections = model_yolo(image, conf=conf_threshold, iou=iou_threshold)[0]

    # Collect raw (xyxy, confidence) pairs for the COCO "dog" class (16).
    candidates = [
        (det.xyxy[0].tolist(), det.conf.item())
        for det in detections.boxes
        if det.cls == 16  # COCO dataset class for dog is 16
    ]

    if not candidates:
        # Nothing found: treat the full image as a single detection.
        return [(image, 1.0, [0, 0, image.width, image.height])]

    found = []
    for coords, score in non_max_suppression(candidates, iou_threshold):
        left, top, right, bottom = coords
        pad_w = (right - left) * 0.05
        pad_h = (bottom - top) * 0.05
        # Grow the crop by 5% per side, clamped to the image bounds.
        left = max(0, left - pad_w)
        top = max(0, top - pad_h)
        right = min(image.width, right + pad_w)
        bottom = min(image.height, bottom + pad_h)
        cropped = image.crop((left, top, right, bottom))
        found.append((cropped, score, [left, top, right, bottom]))
    return found
 
 
 
 
 
 
 
 
 
 
688
 
689
  def non_max_suppression(boxes, iou_threshold):
690
  keep = []
 
36
  from device_manager import DeviceManager, device_handler
37
  import asyncio
38
  import traceback
 
 
39
 
40
  # model_yolo = YOLO('yolov8l.pt')
41
 
 
620
  ])
621
 
622
  return transform(image).unsqueeze(0)
623
+
 
624
  async def predict_single_dog(image):
625
  """
626
  Predicts the dog breed using only the classifier.
 
647
  return probabilities[0], breeds[:3], relative_probs
648
 
649
  except RuntimeError as e:
650
+ if "out of memory" in str(e):
651
+ logger.warning("GPU memory exceeded, falling back to CPU")
652
  device_mgr._current_device = torch.device('cpu')
653
+ return await predict_single_dog(image)
 
654
  raise e
655
 
656
 
 
657
async def detect_multiple_dogs(image, conf_threshold=0.3, iou_threshold=0.55):
    """Detect dogs in *image* with YOLO and return cropped regions.

    Args:
        image: PIL image to scan.
        conf_threshold: minimum YOLO confidence for a detection.
        iou_threshold: IoU threshold passed to YOLO and to the custom
            non_max_suppression pass.

    Returns:
        List of (cropped_image, confidence, [x1, y1, x2, y2]) tuples.
        When no dog is found, the whole image is returned as a single
        entry with confidence 1.0.

    Raises:
        RuntimeError: re-raised when the failure is not a GPU
            out-of-memory condition.
    """
    try:
        results = model_yolo(image, conf=conf_threshold, iou=iou_threshold)[0]
        dogs = []
        boxes = []
        for box in results.boxes:
            if box.cls == 16:  # COCO dataset class for dog is 16
                xyxy = box.xyxy[0].tolist()
                confidence = box.conf.item()
                boxes.append((xyxy, confidence))

        if not boxes:
            # No detection: fall back to the full frame.
            dogs.append((image, 1.0, [0, 0, image.width, image.height]))
        else:
            nms_boxes = non_max_suppression(boxes, iou_threshold)

            for box, confidence in nms_boxes:
                x1, y1, x2, y2 = box
                w, h = x2 - x1, y2 - y1
                # Expand the box by 5% on each side, clamped to the image.
                x1 = max(0, x1 - w * 0.05)
                y1 = max(0, y1 - h * 0.05)
                x2 = min(image.width, x2 + w * 0.05)
                y2 = min(image.height, y2 + h * 0.05)
                cropped_image = image.crop((x1, y1, x2, y2))
                dogs.append((cropped_image, confidence, [x1, y1, x2, y2]))

        return dogs

    except RuntimeError as e:
        if "out of memory" in str(e):
            logger.warning("GPU memory exceeded, falling back to CPU")
            device_mgr._current_device = torch.device('cpu')
            # Retry the detection on CPU.
            return await detect_multiple_dogs(image, conf_threshold, iou_threshold)
        # Bare `raise` preserves the original traceback (fix: was `raise e`,
        # which rewrites the traceback origin).
        raise
692
+
693
 
694
  def non_max_suppression(boxes, iou_threshold):
695
  keep = []