import streamlit as st
from st_utils import bm25_search, semantic_search, hf_api, paginator
from huggingface_hub import ModelSearchArguments
import webbrowser
from numerize.numerize import numerize
import math
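# Streamlit demo app: search models on the Hugging Face Hub via keyword (Hub API), BM25, or semantic search.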
st.set_page_config(
page_title="HF Search Engine",
page_icon="π",
layout="wide",
initial_sidebar_state="auto",
)
### SIDEBAR
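# Choose the search backend: Hub API keyword search, BM25, or semantic search (implemented in st_utils).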
search_backend = st.sidebar.selectbox(
"Search method",
["semantic", "bm25", "hfapi"],
format_func=lambda x: {"hfapi": "Keyword search", "bm25": "BM25 search", "semantic": "Semantic Search"}[x],
)
limit_results = int(st.sidebar.number_input("Limit results", min_value=0, value=10))
st.sidebar.markdown("# Filters")
args = ModelSearchArguments()
library = st.sidebar.multiselect(
"Library", args.library.values(), format_func=lambda x: {v: k for k, v in args.library.items()}[x]
)
task = st.sidebar.multiselect(
"Task", args.pipeline_tag.values(), format_func=lambda x: {v: k for k, v in args.pipeline_tag.items()}[x]
)
### MAIN PAGE
st.markdown(
"<h1 style='text-align: center; '>ππ€ HF Search Engine</h1>",
unsafe_allow_html=True,
)
# Search bar
search_query = st.text_input("Search for a model in HuggingFace", value="", max_chars=None, key=None, type="default")
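# Only run a search once the user has entered a query.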
if search_query != "":
    filters = {
        "library": library,
        "task": task,
    }
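    # Dispatch the query to the selected backend; each helper returns {"hits": [...], "count": int}.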
    if search_backend == "hfapi":
        res = hf_api(search_query, limit_results, filters)
    elif search_backend == "semantic":
        res = semantic_search(search_query, limit_results, filters)
    elif search_backend == "bm25":
        res = bm25_search(search_query, limit_results, filters)
    hit_list, hits_count = res["hits"], res["count"]
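    # Keep only the fields needed to render each hit.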
    hit_list = [
        {
            "modelId": hit["modelId"],
            "tags": hit["tags"],
            "downloads": hit["downloads"],
            "likes": hit["likes"],
            "readme": hit.get("readme", None),
        }
        for hit in hit_list
    ]
    if hit_list:
st.write(f"Search results ({hits_count}):")
if hits_count > 100:
shown_results = 100
else:
shown_results = hits_count
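        # paginator (helper from st_utils) yields (index, hit) pairs for the current page of results.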
        for i, hit in paginator(
            f"Select results (showing {shown_results} of {hits_count} results)",
            hit_list,
        ):
            col1, col2, col3 = st.columns([5, 1, 1])
            col1.metric("Model", hit["modelId"])
            col2.metric("N° downloads", numerize(hit["downloads"]) if not math.isnan(hit["downloads"]) else "N/A")
            col3.metric("N° likes", numerize(hit["likes"]) if not math.isnan(hit["likes"]) else "N/A")
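            # Bind the current hit as a default argument so each button opens the right model page.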
            st.button(
                "View model on 🤗",
                on_click=lambda hit=hit: webbrowser.open(f"https://huggingface.co/{hit['modelId']}"),
                key=f"{i}-{hit['modelId']}",
            )
            st.write(f"**Tags:** {' • '.join(hit['tags'])}")
            if hit["readme"]:
                with st.expander("See README"):
                    st.write(hit["readme"])
            # TODO: embed huggingface spaces
            # import streamlit.components.v1 as components
            # components.html(
            #     f"""
            #     <link rel="stylesheet" href="https://gradio.s3-us-west-2.amazonaws.com/2.6.2/static/bundle.css">
            #     <div id="target"></div>
            #     <script src="https://gradio.s3-us-west-2.amazonaws.com/2.6.2/static/bundle.js"></script>
            #     <script>
            #         launchGradioFromSpaces("abidlabs/question-answering", "#target")
            #     </script>
            #     """,
            #     height=400,
            # )
            st.markdown("---")
    else:
        st.write("No search results, please try again with different keywords")
st.markdown(
"<h6 style='text-align: center; color: #808080;'>Made with β€οΈ By <a href='https://github.com/NouamaneTazi'>Nouamane</a> - Checkout complete project <a href='https://github.com/NouamaneTazi/hf_search'>here</a></h6>",
unsafe_allow_html=True,
) |