|
import os
import functools

# On a ZeroGPU Space the real `spaces` package is available and required;
# everywhere else we substitute a no-op stand-in so `@spaces.GPU` still works.
if os.environ.get("SPACES_ZERO_GPU") is not None:
    import spaces
else:
    class spaces:
        """No-op stand-in for the Hugging Face `spaces` package.

        Provides a `GPU` decorator with the same call shapes as the real one:
        bare (`@spaces.GPU`) or with keyword options (`@spaces.GPU(duration=...)`).
        Outside ZeroGPU the options are irrelevant, so they are accepted and ignored.
        """

        @staticmethod
        def GPU(func=None, **_kwargs):
            # Called as `@spaces.GPU(duration=...)`: return a decorator.
            if func is None:
                return lambda f: spaces.GPU(f)

            # Called as plain `@spaces.GPU`: wrap transparently, preserving
            # the wrapped function's name/docstring for introspection.
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                return func(*args, **kwargs)

            return wrapper
|
|
import gradio as gr
|
|
import subprocess
|
|
from huggingface_hub import HfApi
|
|
|
|
@spaces.GPU
def infer(filter: str, sort: str, sort_dir: bool, infer: str, gated: str, appr: list[str]) -> str:
    """Search the Hugging Face Hub for models and render the hits as Markdown.

    Args:
        filter: Free-text query forwarded as the Hub `filter` argument (skipped if empty).
        sort: Hub sort key, e.g. "last_modified", "likes" or "downloads".
        sort_dir: When True, sort in descending order (`direction=-1`).
        infer: Inference status to filter on ("warm", "cold" or "frozen").
        gated: "gated", "non-gated" or "all" — mapped to the Hub `gated` flag.
        appr: Accepted gating approval methods ("auto"/"manual"); gated models
            whose approval method is not listed are skipped.

    Returns:
        A Markdown string listing the matching models.

    Raises:
        gr.Error: Any failure is surfaced to the Gradio UI as an error message.
    """
    try:
        api = HfApi()

        # Only pass optional Hub filters that the user actually set.
        kwargs = {}
        if filter:
            kwargs["filter"] = filter
        if gated == "gated":
            kwargs["gated"] = True
        elif gated == "non-gated":
            kwargs["gated"] = False
        # "all" falls through: no `gated` restriction.
        if sort_dir:
            kwargs["direction"] = -1

        models = api.list_models(inference=infer, sort=sort, cardData=True, **kwargs)

        # Accumulate chunks and join once — avoids quadratic `+=` growth.
        parts = ["### Results:\n"]
        for model in models:
            # `model.gated` is False or the approval method ("auto"/"manual");
            # skip gated models whose approval method was not requested.
            if model.gated and model.gated not in appr:
                continue
            # "1. " everywhere is fine: Markdown renumbers ordered lists itself.
            parts.append("1. ")
            parts.append(f"[{model.id}](https://hf.co/{model.id})")
            parts.append(f" Inference: '{infer}'")
            parts.append(f" Gated: '{gated}'")
            if model.library_name:
                parts.append(f" Lib:'{model.library_name}'")
            if model.pipeline_tag:
                parts.append(f" Pipeline:'{model.pipeline_tag}'")
            if model.last_modified:
                parts.append(f" LastMod:'{model.last_modified}'")
            if model.likes:
                parts.append(f" Likes:'{model.likes}'")
            if model.downloads:
                parts.append(f" DLs:'{model.downloads}'")
            if model.downloads_all_time:
                parts.append(f" AllDLs:'{model.downloads_all_time}'")
            parts.append("\n")
        return "".join(parts)
    except Exception as e:
        # gr.Error expects a message string; chain the cause for server logs.
        raise gr.Error(str(e)) from e
|
|
|
|
# Build the search UI: a query box, filter controls, a search button,
# and a Markdown area that receives the rendered results.
with gr.Blocks() as demo:
    query_tb = gr.Textbox(label="Query", value="")

    # Filter controls share one row at equal height.
    with gr.Row(equal_height=True):
        inference_radio = gr.Radio(label="Inference status", choices=["warm", "cold", "frozen"], value="warm")
        gated_radio = gr.Radio(label="Gated status", choices=["gated", "non-gated", "all"], value="non-gated")
        sort_radio = gr.Radio(label="Sort", choices=["last_modified", "likes", "downloads"], value="likes")
        descending_cb = gr.Checkbox(label="Sort by descending order", value=False)
        # Hidden control: both approval methods accepted by default.
        approval_cbg = gr.CheckboxGroup(label="Approval method", choices=["auto", "manual"], value=["auto", "manual"], visible=False)

    search_btn = gr.Button("Search", variant="primary")

    results_md = gr.Markdown("<br><br>")

    # Wire the button to the search function defined above.
    search_btn.click(
        infer,
        [query_tb, sort_radio, descending_cb, inference_radio, gated_radio, approval_cbg],
        [results_md],
    )

demo.launch()
|
|
|