SkalskiP committed
Commit
174b6f3
1 Parent(s): 43903ef

YOLO-ARENA with YOLOv10

Files changed (1)
  1. app.py +21 -46
app.py CHANGED
@@ -8,37 +8,27 @@ from inference import get_model
 MARKDOWN = """
 <h1 style='text-align: center'>YOLO-ARENA 🏟️</h1>
 
-Welcome to YOLO-Arena! This demo showcases the performance of various YOLO models:
+Welcome to YOLO-Arena! This demo showcases the performance of various YOLO models
+pre-trained on the COCO dataset.
 
 - YOLOv8 [[code](https://github.com/ultralytics/ultralytics)]
 - YOLOv9 [[code](https://github.com/WongKinYiu/yolov9)]
 - YOLOv10 [[code](https://github.com/THU-MIG/yolov10)]
-- YOLO-NAS [[code](https://github.com/Deci-AI/super-gradients/blob/master/YOLONAS.md)]
 
 Powered by Roboflow [Inference](https://github.com/roboflow/inference) and
-[Supervision](https://github.com/roboflow/supervision).
+[Supervision](https://github.com/roboflow/supervision). 🔥
 """
 
 IMAGE_EXAMPLES = [
-    ['https://media.roboflow.com/dog.jpeg', 0.4]
+    ['https://media.roboflow.com/supervision/image-examples/people-walking.png', 0.4],
+    ['https://media.roboflow.com/supervision/image-examples/vehicles.png', 0.4],
+    ['https://media.roboflow.com/supervision/image-examples/basketball-1.png', 0.4],
 ]
 
-YOLO_V8_MODEL = get_model(model_id="yolov8m-640")
-YOLO_NAS_MODEL = get_model(model_id="coco/15")
+YOLO_V8_MODEL = get_model(model_id="coco/8")
 YOLO_V9_MODEL = get_model(model_id="coco/17")
 YOLO_V10_MODEL = get_model(model_id="coco/22")
 
-YOLO_NAS_TO_COCO_CLASS_ID_MAPPING = {
-    49: 0, 9: 1, 18: 2, 44: 3, 0: 4, 16: 5, 73: 6, 74: 7, 11: 8, 72: 9, 31: 10, 63: 11,
-    48: 12, 8: 13, 10: 14, 20: 15, 28: 16, 37: 17, 56: 18, 25: 19, 30: 20, 6: 21,
-    79: 22, 34: 23, 2: 24, 76: 25, 36: 26, 68: 27, 64: 28, 33: 29, 59: 30, 60: 31,
-    62: 32, 40: 33, 4: 34, 5: 35, 58: 36, 65: 37, 67: 38, 13: 39, 78: 40, 26: 41,
-    32: 42, 41: 43, 61: 44, 14: 45, 3: 46, 1: 47, 54: 48, 46: 49, 15: 50, 19: 51,
-    38: 52, 50: 53, 29: 54, 17: 55, 22: 56, 24: 57, 51: 58, 7: 59, 27: 60, 70: 61,
-    75: 62, 42: 63, 45: 64, 53: 65, 39: 66, 21: 67, 43: 68, 47: 69, 69: 70, 57: 71,
-    52: 72, 12: 73, 23: 74, 77: 75, 55: 76, 66: 77, 35: 78, 71: 79
-}
-
 LABEL_ANNOTATORS = sv.LabelAnnotator(text_color=sv.Color.black())
 BOUNDING_BOX_ANNOTATORS = sv.BoundingBoxAnnotator()
 
@@ -82,15 +72,9 @@ def process_image(
     input_image: np.ndarray,
     confidence_threshold: float,
     iou_threshold: float
-) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
+) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
     yolo_v8_annotated_image = detect_and_annotate(
         YOLO_V8_MODEL, input_image, confidence_threshold, iou_threshold)
-    yolo_nas_annotated_image = detect_and_annotate(
-        YOLO_NAS_MODEL,
-        input_image,
-        confidence_threshold,
-        iou_threshold,
-        YOLO_NAS_TO_COCO_CLASS_ID_MAPPING)
     yolo_v9_annotated_image = detect_and_annotate(
         YOLO_V9_MODEL, input_image, confidence_threshold, iou_threshold)
     yolo_10_annotated_image = detect_and_annotate(
@@ -98,7 +82,6 @@ def process_image(
 
     return (
         yolo_v8_annotated_image,
-        yolo_nas_annotated_image,
         yolo_v9_annotated_image,
         yolo_10_annotated_image
     )
@@ -142,25 +125,19 @@ with gr.Blocks() as demo:
             type='numpy',
             label='Input'
         )
-        with gr.Column():
-            with gr.Row():
-                yolo_v8_output_image_component = gr.Image(
-                    type='numpy',
-                    label='YOLOv8m @ 640x640'
-                )
-                yolo_nas_output_image_component = gr.Image(
-                    type='numpy',
-                    label='YOLO-NAS M @ 640x640'
-                )
-            with gr.Row():
-                yolo_v9_output_image_component = gr.Image(
-                    type='numpy',
-                    label='YOLOv9c @ 640x640'
-                )
-                yolo_v10_output_image_component = gr.Image(
-                    type='numpy',
-                    label='YOLOv10m @ 640x640'
-                )
+        yolo_v8_output_image_component = gr.Image(
+            type='numpy',
+            label='YOLOv8'
+        )
+    with gr.Row():
+        yolo_v9_output_image_component = gr.Image(
+            type='numpy',
+            label='YOLOv9'
+        )
+        yolo_v10_output_image_component = gr.Image(
+            type='numpy',
+            label='YOLOv10'
+        )
     submit_button_component = gr.Button(
         value='Submit',
         scale=1,
@@ -176,7 +153,6 @@ with gr.Blocks() as demo:
         ],
         outputs=[
             yolo_v8_output_image_component,
-            yolo_nas_output_image_component,
            yolo_v9_output_image_component,
            yolo_v10_output_image_component
        ]
@@ -191,7 +167,6 @@ with gr.Blocks() as demo:
         ],
         outputs=[
             yolo_v8_output_image_component,
-            yolo_nas_output_image_component,
            yolo_v9_output_image_component,
            yolo_v10_output_image_component
        ]
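Note: both process_image hunks call detect_and_annotate, which this commit does not touch and which is therefore not shown in the diff. The following is only a rough sketch of what such a helper looks like when built on the Roboflow Inference and Supervision APIs the demo advertises; the infer call signature, the from_inference conversion, and the optional class-id remapping parameter (which the removed YOLO-NAS call passed as its fifth argument) are assumptions, not the file's actual code.

import numpy as np
import supervision as sv

def detect_and_annotate(
    model,
    input_image: np.ndarray,
    confidence_threshold: float,
    iou_threshold: float,
    class_id_mapping: dict = None
) -> np.ndarray:
    # Run inference with the given thresholds; `infer` returns a list of
    # responses, one per input image (assumed Roboflow Inference API).
    result = model.infer(
        input_image,
        confidence=confidence_threshold,
        iou_threshold=iou_threshold
    )[0]
    detections = sv.Detections.from_inference(result)

    # Optional remapping: the removed YOLO-NAS checkpoint used its own class
    # ordering, so its ids had to be translated to COCO ids before annotation.
    if class_id_mapping is not None:
        detections.class_id = np.array(
            [class_id_mapping[class_id] for class_id in detections.class_id])

    # Draw boxes and labels with the module-level annotators defined above.
    annotated_image = input_image.copy()
    annotated_image = BOUNDING_BOX_ANNOTATORS.annotate(
        scene=annotated_image, detections=detections)
    annotated_image = LABEL_ANNOTATORS.annotate(
        scene=annotated_image, detections=detections)
    return annotated_image

Dropping YOLO-NAS removes the only caller that needed the mapping argument, which is why the commit can delete YOLO_NAS_TO_COCO_CLASS_ID_MAPPING outright.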
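The last two hunks edit outputs= lists whose enclosing calls fall outside the diff context; they presumably belong to the Gradio event bindings for the Submit button and the input image. A minimal sketch of the button binding is shown below, with the threshold component names assumed rather than taken from the file.

submit_button_component.click(
    fn=process_image,
    inputs=[
        input_image_component,
        confidence_threshold_component,  # assumed name for the confidence slider
        iou_threshold_component          # assumed name for the IoU slider
    ],
    outputs=[
        yolo_v8_output_image_component,
        yolo_v9_output_image_component,
        yolo_v10_output_image_component
    ]
)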