rizavelioglu committed • Commit 03018e3
1 Parent(s): 0f050fd

add examples from Fashionpedia-test & suppress warnings from ONNX
- app.py +28 -14
- examples/0a4f8205a3b58e70eec99fbbb9422d08.jpg +0 -0
- examples/0a72e0f76ab9b75945f5d610508f9336.jpg +0 -0
- examples/0a939e0e67011aecf7195c17ecb9733c.jpg +0 -0
- examples/adi_10266_5.jpg +0 -0
- adi_103_6.jpg → examples/adi_103_6.jpg +0 -0
- adi_1201_2.jpg → examples/adi_1201_2.jpg +0 -0
- adi_2149_5.jpg → examples/adi_2149_5.jpg +0 -0
- adi_5476_3.jpg → examples/adi_5476_3.jpg +0 -0
- adi_5641_4.jpg → examples/adi_5641_4.jpg +0 -0
- examples/adi_9086_5.jpg +0 -0
- examples/adi_9704_1.jpg +0 -0
app.py
CHANGED
@@ -134,7 +134,7 @@ def draw_predictions(
     return imgs_list
 
 
-@spaces.GPU(duration=
+@spaces.GPU(duration=20)
 def inference(image, model_name, mask_threshold, bbox_threshold):
     """
     Load the ONNX model and run inference with the provided input `image`. Visualize the predictions and save them in a
@@ -150,9 +150,14 @@ def inference(image, model_name, mask_threshold, bbox_threshold):
         repo_id="rizavelioglu/fashionfail",
         filename="facere_plus.onnx" if model_name == "facere+" else "facere_base.onnx"
     )
+    # Session options (see https://github.com/microsoft/onnxruntime/issues/14694#issuecomment-1598429295)
+    sess_options = onnxruntime.SessionOptions()
+    sess_options.graph_optimization_level = onnxruntime.GraphOptimizationLevel.ORT_DISABLE_ALL
     # Create an inference session.
     ort_session = onnxruntime.InferenceSession(
-        path_onnx,
+        path_onnx,
+        providers=["CUDAExecutionProvider", "CPUExecutionProvider"],
+        sess_options=sess_options,
     )
 
     # compute ONNX Runtime output prediction
@@ -175,7 +180,7 @@ Failure Cases in Fashion Object Detection and Segmentation</a>. <br>Upload your
 from the dropdown menu—either `Facere` or `Facere+` <br> Check out the <a
 href="https://rizavelioglu.github.io/fashionfail/">project page</a> for more information."""
 article = r"""
-Example images are sampled from the `FashionFail-test` set, which the models did not see during training.
+Example images are sampled from the `Fashionpedia-test` and `FashionFail-test` set, which the models did not see during training.
 
 <br>**Citation** <br>If you find our work useful in your research, please consider giving a star ⭐ and
 a citation:
@@ -191,16 +196,25 @@ a citation:
 """
 
 examples = [
-    ["
-    ["
-    ["
-    ["
-    ["
-    ["
-    ["
-    ["
-    ["
-    ["
+    ["examples/0a4f8205a3b58e70eec99fbbb9422d08.jpg", "facere", 0.5, 0.7],
+    ["examples/0a72e0f76ab9b75945f5d610508f9336.jpg", "facere", 0.5, 0.7],
+    ["examples/0a939e0e67011aecf7195c17ecb9733c.jpg", "facere", 0.5, 0.7],
+    ["examples/adi_9086_5.jpg", "facere", 0.5, 0.5],
+    ["examples/adi_9086_5.jpg", "facere+", 0.5, 0.5],
+    ["examples/adi_9704_1.jpg", "facere", 0.5, 0.5],
+    ["examples/adi_9704_1.jpg", "facere+", 0.5, 0.5],
+    ["examples/adi_10266_5.jpg", "facere", 0.5, 0.5],
+    ["examples/adi_10266_5.jpg", "facere+", 0.5, 0.5],
+    ["examples/adi_103_6.jpg", "facere", 0.5, 0.5],
+    ["examples/adi_103_6.jpg", "facere+", 0.5, 0.5],
+    ["examples/adi_1201_2.jpg", "facere", 0.5, 0.7],
+    ["examples/adi_1201_2.jpg", "facere+", 0.5, 0.7],
+    ["examples/adi_2149_5.jpg", "facere", 0.5, 0.7],
+    ["examples/adi_2149_5.jpg", "facere+", 0.5, 0.7],
+    ["examples/adi_5476_3.jpg", "facere", 0.5, 0.7],
+    ["examples/adi_5476_3.jpg", "facere+", 0.5, 0.7],
+    ["examples/adi_5641_4.jpg", "facere", 0.5, 0.7],
+    ["examples/adi_5641_4.jpg", "facere+", 0.5, 0.7]
 ]
 
 demo = gr.Interface(
@@ -223,7 +237,7 @@ demo = gr.Interface(
     description=description,
     article=article,
     examples=examples,
-    cache_examples=
+    cache_examples=False,
     examples_per_page=6
 )
 
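For readers skimming the diff, the core code change is how the ONNX Runtime session is built: graph optimizations are disabled via SessionOptions (per the linked onnxruntime issue) to suppress the warnings mentioned in the commit message, and explicit execution providers are passed so the session prefers CUDA and falls back to CPU; the surrounding inference function is also decorated with @spaces.GPU(duration=20) so ZeroGPU attaches a GPU for the call. Below is a minimal, self-contained sketch of that session setup; the model path, input name, and dummy input shape are placeholders, not the Space's actual values.

import numpy as np
import onnxruntime

# Disable graph optimizations, as in the diff, to silence ONNX Runtime warnings
# (see the onnxruntime issue linked in the added code comment).
sess_options = onnxruntime.SessionOptions()
sess_options.graph_optimization_level = onnxruntime.GraphOptimizationLevel.ORT_DISABLE_ALL

# Prefer CUDA, fall back to CPU when no GPU is available.
ort_session = onnxruntime.InferenceSession(
    "model.onnx",  # placeholder path; the app downloads facere_base.onnx / facere_plus.onnx from the Hub
    providers=["CUDAExecutionProvider", "CPUExecutionProvider"],
    sess_options=sess_options,
)

# Run the session on a dummy input; the real app feeds a preprocessed image tensor.
dummy_input = np.random.rand(3, 512, 512).astype(np.float32)  # placeholder shape
input_name = ort_session.get_inputs()[0].name
outputs = ort_session.run(None, {input_name: dummy_input})
print(len(outputs), "outputs")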
examples/0a4f8205a3b58e70eec99fbbb9422d08.jpg: ADDED
examples/0a72e0f76ab9b75945f5d610508f9336.jpg: ADDED
examples/0a939e0e67011aecf7195c17ecb9733c.jpg: ADDED
examples/adi_10266_5.jpg: ADDED
adi_103_6.jpg → examples/adi_103_6.jpg: RENAMED (file without changes)
adi_1201_2.jpg → examples/adi_1201_2.jpg: RENAMED (file without changes)
adi_2149_5.jpg → examples/adi_2149_5.jpg: RENAMED (file without changes)
adi_5476_3.jpg → examples/adi_5476_3.jpg: RENAMED (file without changes)
adi_5641_4.jpg → examples/adi_5641_4.jpg: RENAMED (file without changes)
examples/adi_9086_5.jpg: ADDED
examples/adi_9704_1.jpg: ADDED
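The images added and moved into examples/ above feed the demo's example gallery. In the diff they are passed to gr.Interface via the examples argument, with cache_examples=False so Gradio does not pre-run the model on them at startup, and examples_per_page=6 to paginate them. A rough sketch of that wiring follows, assuming image/dropdown/slider inputs and a gallery output; the diff does not show the actual component definitions, and the stub inference function below just echoes its input.

import gradio as gr

def inference(image, model_name, mask_threshold, bbox_threshold):
    # Stand-in for the app's ONNX-based inference; simply returns the input image.
    return [image]

# Each row matches the input components in order: image path, model name, mask threshold, bbox threshold.
examples = [
    ["examples/0a4f8205a3b58e70eec99fbbb9422d08.jpg", "facere", 0.5, 0.7],
    ["examples/adi_9086_5.jpg", "facere+", 0.5, 0.5],
]

demo = gr.Interface(
    fn=inference,
    inputs=[
        gr.Image(type="filepath", label="Input image"),      # assumed component types
        gr.Dropdown(["facere", "facere+"], label="Model"),
        gr.Slider(0.0, 1.0, value=0.5, label="Mask threshold"),
        gr.Slider(0.0, 1.0, value=0.7, label="BBox threshold"),
    ],
    outputs=gr.Gallery(label="Predictions"),
    examples=examples,
    cache_examples=False,   # as in the diff: do not precompute example outputs
    examples_per_page=6,
)

if __name__ == "__main__":
    demo.launch()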