DawnC committed
Commit 565c518 • 1 Parent(s): ad6fb2f

Update app.py

Files changed (1)
  1. app.py +18 -0
app.py CHANGED
@@ -252,17 +252,35 @@ def get_akc_breeds_link():
 
 
 def detect_dogs(image):
+    # Run detection with the YOLO model
     results = model_yolo(image)
+
+    # Print the YOLO detection results
+    print(f"YOLO detection results: {results}")
+
     dogs = []
     for result in results:
+        # Print each result
+        print(f"Result: {result}")
         for box in result.boxes:
+            # Print the class and coordinates of each bounding box
+            print(f"Detected class: {box.cls}, Confidence: {box.conf}, Box coordinates: {box.xyxy}")
+
             if box.cls == 16:  # the dog class in the COCO dataset is 16
                 xyxy = box.xyxy[0].tolist()
                 confidence = box.conf.item()
+
+                # Confirm the image is cropped at the right coordinates
+                print(f"Cropping image at coordinates: {xyxy}")
+
                 cropped_image = image.crop((xyxy[0], xyxy[1], xyxy[2], xyxy[3]))
                 dogs.append((cropped_image, confidence, xyxy))
+
+    # Finally, print the number of dogs detected
+    print(f"Number of dogs detected: {len(dogs)}")
     return dogs
 
+
 def predict_breed(cropped_image):
     image_tensor = preprocess_image(cropped_image)
     with torch.no_grad():
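
For context, here is a minimal usage sketch (not part of the commit) of how detect_dogs might be exercised end to end. It assumes model_yolo is an ultralytics YOLO model and that the app passes PIL images into detect_dogs; both are inferred from the diff, and the weights file and input path below are hypothetical placeholders.

# Minimal usage sketch; assumptions: model_yolo is an ultralytics YOLO model,
# detect_dogs receives PIL images. Weights and image paths are hypothetical.
from PIL import Image
from ultralytics import YOLO

model_yolo = YOLO("yolov8n.pt")  # hypothetical weights; the app's actual model may differ

def detect_dogs(image):
    # Mirrors the function in the diff, without the debug prints
    results = model_yolo(image)
    dogs = []
    for result in results:
        for box in result.boxes:
            if int(box.cls) == 16:  # class 16 is "dog" in the COCO label set
                xyxy = box.xyxy[0].tolist()
                confidence = box.conf.item()
                dogs.append((image.crop(tuple(xyxy)), confidence, xyxy))
    return dogs

img = Image.open("example.jpg")  # hypothetical input image
for cropped, conf, box in detect_dogs(img):
    print(f"Dog at {box} with confidence {conf:.2f}, crop size {cropped.size}")

Returning (cropped_image, confidence, xyxy) tuples keeps the downstream call simple: each crop can then be handed to predict_breed independently, as the following function in app.py does.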