elliesleightholm
committed on
Commit
•
27a272e
1
Parent(s):
ac1e63c
updating links
Browse files
app.py
CHANGED
@@ -225,27 +225,8 @@ def classify_image(
|
|
225 |
|
226 |
with gr.Blocks() as demo:
|
227 |
gr.Markdown("# Image Classification with Taxonomy Mapping")
|
228 |
-
gr.Markdown("""
|
229 |
-
<div style="display: flex; gap: 10px;">
|
230 |
-
<a href="https://www.marqo.ai/blog/introducing-marqos-ecommerce-embedding-models">
|
231 |
-
<img src="https://img.shields.io/badge/Model_Release-Blog-blue?logo=font-awesome&logoColor=white&style=flat&logo=pencil-alt" alt="Blog">
|
232 |
-
</a>
|
233 |
-
<a href="https://huggingface.co/collections/Marqo/marqo-ecommerce-embeddings-66f611b9bb9d035a8d164fbb">
|
234 |
-
<img src="https://img.shields.io/badge/🤗-Hugging_Face-yellow?logo=hugging-face&logoColor=white&style=flat&logo=pencil-alt" alt="Hugging Face">
|
235 |
-
</a>
|
236 |
-
<a href="https://www.marqo.ai/blog/how-to-build-an-ecommerce-image-search-application">
|
237 |
-
<img src="https://img.shields.io/badge/Ecommerce_Search_-Blog-red?logo=font-awesome&logoColor=white&style=flat&logo=pencil-alt" alt="Blog">
|
238 |
-
</a>
|
239 |
-
<a href="https://colab.research.google.com/drive/1Syl9Gde6LguyMjlpDANetbpvBNb9ddxw?usp=sharing">
|
240 |
-
<img src="https://img.shields.io/badge/π€_-Open_In_Google_Colab-orange?logo=font-awesome&logoColor=white&style=flat&logo=pencil-alt" alt="Colab">
|
241 |
-
</a>
|
242 |
-
<a href="https://join.slack.com/t/marqo-community/shared_invite/zt-2b4nsvbd2-TDf8agPszzWH5hYKBMIgDA">
|
243 |
-
<img src="https://img.shields.io/badge/Slack-Join_Marqo_Community-purple?logo=Slack" alt="Slack Community">
|
244 |
-
</a>
|
245 |
-
</div>
|
246 |
-
""")
|
247 |
gr.Markdown(
|
248 |
-
"## How to use this app\n\nThis app compares Marqo's Ecommerce embeddings to OpenAI's ViT-B-16 CLIP model for Ecommerce taxonomy mapping. A beam search is used to find the correct classification in the taxonomy. The original OpenAI CLIP models perform very poorly on Ecommerce data."
|
249 |
)
|
250 |
gr.Markdown(
|
251 |
"Upload an image, provide an image URL, or select an example image, select the model size, and get the taxonomy mapping. The taxonomy is based on the Amazon product taxonomy."
|
|
|
225 |
|
226 |
with gr.Blocks() as demo:
|
227 |
gr.Markdown("# Image Classification with Taxonomy Mapping")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
228 |
gr.Markdown(
|
229 |
+
"## How to use this app\n\nThis app compares [Marqo's Ecommerce embeddings](https://huggingface.co/collections/Marqo/marqo-ecommerce-embeddings-66f611b9bb9d035a8d164fbb) to OpenAI's ViT-B-16 CLIP model for Ecommerce taxonomy mapping. A beam search is used to find the correct classification in the taxonomy. The original OpenAI CLIP models perform very poorly on Ecommerce data."
|
230 |
)
|
231 |
gr.Markdown(
|
232 |
"Upload an image, provide an image URL, or select an example image, select the model size, and get the taxonomy mapping. The taxonomy is based on the Amazon product taxonomy."
|