# HuggingFace Trending Board — Gradio app showing trending Spaces/Models/Datasets.
import os | |
import random | |
import base64 | |
import requests | |
from selenium import webdriver | |
from selenium.webdriver.support.ui import WebDriverWait | |
from selenium.webdriver.support import expected_conditions as EC | |
from selenium.webdriver.common.by import By | |
from selenium.common.exceptions import WebDriverException, TimeoutException | |
from PIL import Image | |
from io import BytesIO | |
from datetime import datetime | |
import gradio as gr | |
from typing import Tuple | |
import time | |
from pathlib import Path # μΆκ° | |
# On-disk screenshot cache directory (one PNG per URL, keyed by base64 of the URL)
CACHE_DIR = Path("screenshot_cache")
CACHE_DIR.mkdir(exist_ok=True)
# In-memory screenshot cache: url -> base64-encoded PNG string
SCREENSHOT_CACHE = {}
def get_cached_screenshot(url: str) -> str:
    """Return a base64-encoded PNG screenshot of *url*, using the disk cache.

    Looks for a cached file under CACHE_DIR keyed by the base64 of the URL;
    on a miss, takes a fresh screenshot and persists it so later calls
    (including after process restarts) hit the disk cache.

    Returns:
        Base64 PNG string, or None when the screenshot could not be taken.
    """
    cache_file = CACHE_DIR / f"{base64.b64encode(url.encode()).decode()}.png"
    if cache_file.exists():
        with open(cache_file, "rb") as f:
            return base64.b64encode(f.read()).decode()

    screenshot_b64 = take_screenshot(url)
    # Bug fix: previously nothing ever wrote to the disk cache, so the
    # cache_file.exists() branch above was dead code.
    if screenshot_b64:
        try:
            cache_file.write_bytes(base64.b64decode(screenshot_b64))
        except OSError as e:
            # Best-effort cache: a write failure should not break rendering.
            print(f"Failed to write screenshot cache: {e}")
    return screenshot_b64
def take_screenshot(url):
    """Capture a screenshot of *url* with headless Chrome.

    Results are memoized in SCREENSHOT_CACHE keyed by the URL as given.

    Returns:
        Base64-encoded PNG string, or None on any failure.
    """
    if url in SCREENSHOT_CACHE:
        return SCREENSHOT_CACHE[url]

    # Normalize bare domains to https.
    if not url.startswith('http'):
        url = f"https://{url}"

    options = webdriver.ChromeOptions()
    options.add_argument('--headless')
    options.add_argument('--no-sandbox')
    options.add_argument('--disable-dev-shm-usage')
    options.add_argument('--window-size=1080,720')

    driver = None
    try:
        driver = webdriver.Chrome(options=options)
        driver.get(url)

        # Explicit wait: <body> element present (max 10s).
        try:
            WebDriverWait(driver, 10).until(
                EC.presence_of_element_located((By.TAG_NAME, "body"))
            )
        except TimeoutException:
            print(f"νμ΄μ§ λ‘λ© νμμμ: {url}")

        # Give client-side rendering a moment to settle.
        time.sleep(2)

        # Bug fix: the original evaluated `execute_script(...) == "complete"`
        # and discarded the result; actually wait for document.readyState.
        try:
            WebDriverWait(driver, 5).until(
                lambda d: d.execute_script("return document.readyState") == "complete"
            )
        except TimeoutException:
            print(f"readyState wait timed out: {url}")

        # Capture and re-encode as PNG base64.
        screenshot = driver.get_screenshot_as_png()
        img = Image.open(BytesIO(screenshot))
        buffered = BytesIO()
        img.save(buffered, format="PNG")
        base64_image = base64.b64encode(buffered.getvalue()).decode()

        # Cache successful captures only (failures may be transient).
        SCREENSHOT_CACHE[url] = base64_image
        return base64_image

    except WebDriverException as e:
        print(f"μ€ν¬λ¦°μ· 촬μ μ€ν¨: {str(e)} for URL: {url}")
        return None
    except Exception as e:
        print(f"μμμΉ λͺ»ν μ€λ₯: {str(e)} for URL: {url}")
        return None
    finally:
        # Explicit None-check instead of the fragile `'driver' in locals()`.
        if driver is not None:
            driver.quit()
from datetime import datetime, timedelta | |
def calculate_rising_rate(created_date: str, rank: int) -> int:
    """Compute the "AI Rising Rate" as a star count from 1 to 5.

    Combines a recency score (newer -> higher, max 300) with a rank score
    (better rank -> higher, max 300) and buckets the 0-600 total into stars.

    Args:
        created_date: ISO timestamp string such as "2024-01-01T12:00:00.000Z".
            An empty or malformed value now contributes 0 instead of raising.
        rank: 1-based trending position (callers pass 0 when only recency
            matters, which maxes out the rank score).

    Returns:
        Star count between 1 and 5.
    """
    # Recency score: one point lost per day of age, floored at 0 (max 300).
    try:
        created = datetime.strptime(created_date.split('T')[0], '%Y-%m-%d')
        days_diff = (datetime.now() - created).days
        date_score = max(0, 300 - days_diff)
    except (ValueError, AttributeError):
        # Robustness fix: a missing/empty createdAt used to raise ValueError
        # (sort callers pass x.get('createdAt', '')).
        date_score = 0

    # Rank score: max 300.
    rank_score = max(0, 300 - rank)

    total_score = date_score + rank_score

    # Bucket the total into 1-5 stars.
    if total_score <= 100:
        return 1
    elif total_score <= 200:
        return 2
    elif total_score <= 300:
        return 3
    elif total_score <= 400:
        return 4
    return 5
def get_popularity_grade(likes: int, stars: int) -> tuple: | |
"""AI Popularity Score λ±κΈ κ³μ°""" | |
# κΈ°λ³Έ μ μ (likes) | |
base_score = min(likes, 10000) # μ΅λ 10000μ | |
# λ³μ μΆκ° μ μ (λ³ νλλΉ 500μ ) | |
star_score = stars * 500 | |
# μ΄μ | |
total_score = base_score + star_score | |
# λ±κΈ ν μ΄λΈ (18λ¨κ³) | |
grades = [ | |
(9000, "AAA+"), (8500, "AAA"), (8000, "AAA-"), | |
(7500, "AA+"), (7000, "AA"), (6500, "AA-"), | |
(6000, "A+"), (5500, "A"), (5000, "A-"), | |
(4500, "BBB+"), (4000, "BBB"), (3500, "BBB-"), | |
(3000, "BB+"), (2500, "BB"), (2000, "BB-"), | |
(1500, "B+"), (1000, "B"), (500, "B-") | |
] | |
for threshold, grade in grades: | |
if total_score >= threshold: | |
return grade, total_score | |
return "B-", total_score | |
def get_rating_info(item: dict, index: int) -> str:
    """Render the AI Rising Rate / AI Popularity Score panel for one card."""
    created_date = item.get('createdAt', '').split('T')[0]
    like_count = int(str(item.get('likes', '0')).replace(',', ''))

    # Rising rate as filled/empty star glyphs.
    star_count = calculate_rising_rate(created_date, index + 1)
    stars_markup = "β " * star_count + "β" * (5 - star_count)

    # Popularity grade plus its display colour (keyed on the letters only).
    grade_label, grade_score = get_popularity_grade(like_count, star_count)
    palette = {
        'AAA': '#FFD700', 'AA': '#FFA500', 'A': '#FF4500',
        'BBB': '#4169E1', 'BB': '#1E90FF', 'B': '#00BFFF'
    }
    color = palette.get(grade_label.rstrip('+-'), '#666666')

    return f"""
    <div style='
        margin-top: 15px;
        padding: 15px;
        background: rgba(255,255,255,0.4);
        border-radius: 10px;
        font-size: 0.9em;
        box-shadow: 0 2px 10px rgba(0,0,0,0.1);'>
        <div style='
            display: grid;
            grid-template-columns: repeat(2, 1fr);
            gap: 15px;'>
            <div style='
                color: #333;
                display: flex;
                flex-direction: column;
                gap: 5px;'>
                <span style='font-weight: bold;'>AI Rising Rate:</span>
                <span style='
                    color: #FF8C00;
                    font-size: 1.4em;
                    letter-spacing: 2px;
                    text-shadow: 1px 1px 2px rgba(0,0,0,0.1);'>{stars_markup}</span>
            </div>
            <div style='
                color: #333;
                display: flex;
                flex-direction: column;
                gap: 5px;'>
                <span style='font-weight: bold;'>AI Popularity Score:</span>
                <span style='
                    font-size: 1.2em;
                    font-weight: bold;
                    color: {color};
                    text-shadow: 1px 1px 2px rgba(0,0,0,0.1);'>{grade_label} ({grade_score:,})</span>
            </div>
        </div>
    </div>
    """
def get_hardware_info(item: dict) -> tuple:
    """Extract (cpu, gpu, sdk) display strings from an API item dict.

    Falls back to ('Standard', 'None', 'N/A') if anything goes wrong.
    """
    try:
        runtime = item.get('runtime', {})

        cpu_info = runtime.get('cpu', 'Standard')

        # GPU label: "<name> (<memory>GB)" when both are known, name alone
        # otherwise, "None" when no GPU accelerator is reported.
        gpu_info = "None"
        if runtime.get('accelerator') == "gpu":
            gpu_meta = runtime.get('gpu', {})
            name = gpu_meta.get('name', '')
            memory = gpu_meta.get('memory', '')
            if name:
                gpu_info = f"{name} ({memory}GB)" if memory else name

        # A @spaces.GPU decorator implies GPU use even without runtime info.
        if '@spaces.GPU' in str(item.get('sdk_version', '')) and gpu_info == "None":
            gpu_info = "GPU Enabled"

        sdk = item.get('sdk', 'N/A')

        print(f"Debug - Runtime Info: {runtime}")
        print(f"Debug - GPU Info: {gpu_info}")

        return cpu_info, gpu_info, sdk
    except Exception as e:
        print(f"Error parsing hardware info: {str(e)}")
        return 'Standard', 'None', 'N/A'
def get_card(item: dict, index: int, card_type: str = "space") -> str:
    """Render one trending item (space/model/dataset) as an HTML card.

    Args:
        item: Raw API item dict (expects 'id', 'likes', 'createdAt', 'tags').
        index: 0-based position in the list (shown as rank badge #index+1).
        card_type: "space", "model" or "dataset" — controls URL, colors
            and whether a live screenshot is used as the card background.

    Returns:
        HTML snippet for the card.
    """
    item_id = item.get('id', '')
    # Bug fix: ids without "/" used to raise ValueError on split.
    if '/' in item_id:
        author, title = item_id.split('/', 1)
    else:
        author, title = '', item_id
    likes = format(item.get('likes', 0), ',')
    created = item.get('createdAt', '').split('T')[0]

    # Canonical URL per item type.
    if card_type == "space":
        url = f"https://huggingface.co/spaces/{item_id}"
    elif card_type == "model":
        url = f"https://huggingface.co/{item_id}"
    else:  # dataset
        url = f"https://huggingface.co/datasets/{item_id}"

    tags = item.get('tags', [])

    # AI rating panel (stars + popularity grade).
    rating_info = get_rating_info(item, index)

    # Per-type gradient, header background and badge.
    if card_type == "space":
        gradient_colors = """
            rgba(255, 182, 193, 0.7), /* pastel pink */
            rgba(173, 216, 230, 0.7), /* pastel blue */
            rgba(255, 218, 185, 0.7)  /* pastel peach */
        """
        # Bug fix: the screenshot was fetched twice per card
        # (`x if get_cached_screenshot(url) else ''` re-called it).
        screenshot_b64 = get_cached_screenshot(url) or ''
        bg_content = f"""
            background-image: url(data:image/png;base64,{screenshot_b64});
            background-size: cover;
            background-position: center;
        """
        type_icon = "π―"
        type_label = "SPACE"
    elif card_type == "model":
        gradient_colors = """
            rgba(110, 142, 251, 0.7), /* model blue */
            rgba(130, 158, 251, 0.7),
            rgba(150, 174, 251, 0.7)
        """
        bg_content = f"""
            background: linear-gradient(135deg, #6e8efb, #4a6cf7);
            padding: 15px;
        """
        type_icon = "π€"
        type_label = "MODEL"
    else:  # dataset
        gradient_colors = """
            rgba(255, 107, 107, 0.7), /* dataset red */
            rgba(255, 127, 127, 0.7),
            rgba(255, 147, 147, 0.7)
        """
        bg_content = f"""
            background: linear-gradient(135deg, #ff6b6b, #ff8787);
            padding: 15px;
        """
        type_icon = "π"
        type_label = "DATASET"

    content_bg = f"""
        background: linear-gradient(135deg, {gradient_colors});
        backdrop-filter: blur(10px);
    """

    # Tag chips overlaying the header (models and datasets only).
    tags_html = ""
    if card_type != "space":
        tags_html = f"""
        <div style='
            position: absolute;
            top: 50%;
            left: 50%;
            transform: translate(-50%, -50%);
            display: flex;
            flex-wrap: wrap;
            gap: 5px;
            justify-content: center;
            width: 90%;'>
            {' '.join([f'''
                <span style='
                    background: rgba(255,255,255,0.2);
                    padding: 5px 10px;
                    border-radius: 15px;
                    color: white;
                    font-size: 0.8em;'>
                    #{tag}
                </span>
            ''' for tag in tags[:5]])}
        </div>
        """

    return f"""
    <div class="card" style='
        position: relative;
        border: none;
        padding: 0;
        margin: 10px;
        border-radius: 20px;
        box-shadow: 0 10px 20px rgba(0,0,0,0.1);
        background: white;
        transition: all 0.3s ease;
        overflow: hidden;
        min-height: 400px;
        cursor: pointer;
        transform-origin: center;'
        onmouseover="this.style.transform='scale(0.98) translateY(5px)'; this.style.boxShadow='0 5px 15px rgba(0,0,0,0.2)';"
        onmouseout="this.style.transform='scale(1) translateY(0)'; this.style.boxShadow='0 10px 20px rgba(0,0,0,0.1)';"
        onclick="window.open('{url}', '_blank')">
        <!-- header area -->
        <div style='
            width: 100%;
            height: 200px;
            {bg_content}
            position: relative;'>
            <!-- rank badge -->
            <div style='
                position: absolute;
                top: 10px;
                left: 10px;
                background: rgba(0,0,0,0.7);
                color: white;
                padding: 5px 15px;
                border-radius: 20px;
                font-weight: bold;
                font-size: 0.9em;
                backdrop-filter: blur(5px);'>
                #{index + 1}
            </div>
            <!-- type badge -->
            <div style='
                position: absolute;
                top: 10px;
                right: 10px;
                background: rgba(255,255,255,0.9);
                padding: 5px 15px;
                border-radius: 20px;
                font-weight: bold;
                font-size: 0.8em;'>
                {type_icon} {type_label}
            </div>
            {tags_html}
        </div>
        <!-- content area -->
        <div style='
            padding: 20px;
            {content_bg}
            border-radius: 0 0 20px 20px;
            border-top: 1px solid rgba(255,255,255,0.5);'>
            <h3 style='
                margin: 0 0 15px 0;
                color: #333;
                font-size: 1.3em;
                line-height: 1.4;
                display: -webkit-box;
                -webkit-line-clamp: 2;
                -webkit-box-orient: vertical;
                overflow: hidden;
                text-overflow: ellipsis;
                text-shadow: 1px 1px 1px rgba(255,255,255,0.8);'>
                {title}
            </h3>
            <div style='
                display: grid;
                grid-template-columns: repeat(2, 1fr);
                gap: 10px;
                font-size: 0.9em;
                background: rgba(255,255,255,0.3);
                padding: 10px;
                border-radius: 10px;'>
                <div style='color: #444;'>
                    <span style='margin-right: 5px;'>π€</span> {author}
                </div>
                <div style='color: #444;'>
                    <span style='margin-right: 5px;'>β€οΈ</span> {likes}
                </div>
                <div style='color: #444; grid-column: span 2;'>
                    <span style='margin-right: 5px;'>π </span> {created}
                </div>
            </div>
            {rating_info}
        </div>
    </div>
    """
def get_trending_spaces(search_query="", sort_by="rank", progress=gr.Progress()) -> Tuple[str, str]:
    """Fetch trending spaces and render them as an HTML card gallery.

    Args:
        search_query: Case-insensitive substring filter on id+title.
        sort_by: "rank" (API order), "rising_rate" or "popularity".
        progress: Gradio progress tracker.

    Returns:
        (gallery_html, status_message); on failure both carry the error.
    """
    url = "https://huggingface.co/api/spaces"
    try:
        progress(0, desc="Fetching spaces data...")
        # Bug fix: limit was 10 although the board advertises the TOP 300
        # (models/datasets already used 300).
        params = {
            'full': 'true',
            'limit': 300
        }
        response = requests.get(url, params=params)
        response.raise_for_status()
        spaces = response.json()

        # Filter locally; the original issued a second, identical API
        # request here, which was redundant.
        if search_query:
            needle = search_query.lower()
            spaces = [space for space in spaces if needle in
                      (space.get('id', '') + space.get('title', '')).lower()][:300]

        # Re-sort unless keeping the API's trending rank order.
        if sort_by == "rising_rate":
            spaces.sort(key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
        elif sort_by == "popularity":
            spaces.sort(key=lambda x: get_popularity_grade(
                int(str(x.get('likes', '0')).replace(',', '')),
                calculate_rising_rate(x.get('createdAt', ''), 0))[1],
                reverse=True)

        progress(0.1, desc="Creating gallery...")
        html_content = """
        <div style='padding: 20px; background: #f5f5f5;'>
            <div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
        """
        for idx, space in enumerate(spaces):
            html_content += get_card(space, idx, "space")
            progress((0.1 + 0.9 * idx / len(spaces)), desc=f"Loading space {idx+1}/{len(spaces)}...")
        html_content += "</div></div>"
        progress(1.0, desc="Complete!")
        return html_content, f"Found {len(spaces)} spaces"
    except Exception as e:
        error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>'
        return error_html, f"Error: {str(e)}"
def get_models(search_query="", sort_by="rank", progress=gr.Progress()) -> Tuple[str, str]:
    """Fetch trending models and render them as an HTML card gallery.

    Args:
        search_query: Case-insensitive substring filter on id+title.
        sort_by: "rank" (API order), "rising_rate" or "popularity".
        progress: Gradio progress tracker.

    Returns:
        (gallery_html, status_message); on failure both carry the error.
    """
    url = "https://huggingface.co/api/models"
    try:
        progress(0, desc="Fetching models data...")
        params = {
            'full': 'true',
            'limit': 300  # top 300
        }
        response = requests.get(url, params=params)
        response.raise_for_status()
        models = response.json()

        # Filter locally; the original issued a second, identical API
        # request here, which was redundant.
        if search_query:
            needle = search_query.lower()
            models = [model for model in models if needle in
                      (model.get('id', '') + model.get('title', '')).lower()][:300]

        # Re-sort unless keeping the API's trending rank order.
        if sort_by == "rising_rate":
            models.sort(key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
        elif sort_by == "popularity":
            models.sort(key=lambda x: get_popularity_grade(
                int(str(x.get('likes', '0')).replace(',', '')),
                calculate_rising_rate(x.get('createdAt', ''), 0))[1],
                reverse=True)

        progress(0.1, desc="Creating gallery...")
        html_content = """
        <div style='padding: 20px; background: #f5f5f5;'>
            <div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
        """
        for idx, model in enumerate(models):
            html_content += get_card(model, idx, "model")
            progress((0.1 + 0.9 * idx / len(models)), desc=f"Loading model {idx+1}/{len(models)}...")
        html_content += "</div></div>"
        progress(1.0, desc="Complete!")
        return html_content, f"Found {len(models)} models"
    except Exception as e:
        error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>'
        return error_html, f"Error: {str(e)}"
def get_datasets(search_query="", sort_by="rank", progress=gr.Progress()) -> Tuple[str, str]:
    """Fetch trending datasets and render them as an HTML card gallery.

    Args:
        search_query: Case-insensitive substring filter on id+title.
        sort_by: "rank" (API order), "rising_rate" or "popularity".
        progress: Gradio progress tracker.

    Returns:
        (gallery_html, status_message); on failure both carry the error.
    """
    url = "https://huggingface.co/api/datasets"
    try:
        progress(0, desc="Fetching datasets data...")
        params = {
            'full': 'true',
            'limit': 300  # top 300
        }
        response = requests.get(url, params=params)
        response.raise_for_status()
        datasets = response.json()

        # Filter locally; the original issued a second, identical API
        # request here, which was redundant.
        if search_query:
            needle = search_query.lower()
            datasets = [dataset for dataset in datasets if needle in
                        (dataset.get('id', '') + dataset.get('title', '')).lower()][:300]

        # Re-sort unless keeping the API's trending rank order.
        if sort_by == "rising_rate":
            datasets.sort(key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
        elif sort_by == "popularity":
            datasets.sort(key=lambda x: get_popularity_grade(
                int(str(x.get('likes', '0')).replace(',', '')),
                calculate_rising_rate(x.get('createdAt', ''), 0))[1],
                reverse=True)

        progress(0.1, desc="Creating gallery...")
        html_content = """
        <div style='padding: 20px; background: #f5f5f5;'>
            <div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
        """
        for idx, dataset in enumerate(datasets):
            html_content += get_card(dataset, idx, "dataset")
            progress((0.1 + 0.9 * idx / len(datasets)), desc=f"Loading dataset {idx+1}/{len(datasets)}...")
        html_content += "</div></div>"
        progress(1.0, desc="Complete!")
        return html_content, f"Found {len(datasets)} datasets"
    except Exception as e:
        error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>'
        return error_html, f"Error: {str(e)}"
def sort_items(items, sort_by):
    """Return *items* ordered by the requested criterion.

    "rank" (and any unknown value) keeps the incoming API order;
    "rising_rate" and "popularity" sort descending by the computed score.
    """
    key_fns = {
        "rising_rate": lambda x: calculate_rising_rate(x.get('createdAt', ''), 0),
        "popularity": lambda x: get_popularity_grade(
            int(str(x.get('likes', '0')).replace(',', '')),
            calculate_rising_rate(x.get('createdAt', ''), 0))[1],
    }
    key_fn = key_fns.get(sort_by)
    if key_fn is None:
        return items
    return sorted(items, key=key_fn, reverse=True)
def fetch_items(item_type, search_query="", sort_by="rank", limit=1000):
    """Fetch items of one kind ("spaces" / "models" / "datasets").

    Applies a case-insensitive substring filter on id+title, sorts via
    sort_items, and caps the result at 300. Returns [] on any error.
    """
    base_url = f"https://huggingface.co/api/{item_type}"
    params = {
        'full': 'true',
        'limit': limit,
        'search': search_query
    }
    try:
        response = requests.get(base_url, params=params)
        response.raise_for_status()
        items = response.json()

        if search_query:
            needle = search_query.lower()
            items = [it for it in items if needle in
                     (it.get('id', '') + it.get('title', '')).lower()]

        return sort_items(items, sort_by)[:300]
    except Exception as e:
        print(f"Error fetching items: {e}")
        return []
def create_interface():
    """Build the Gradio Blocks UI: three tabs (Spaces / Models / Datasets),
    each with a search box, a sort selector and a refresh button, all of
    which re-render that tab's gallery."""
    tab_specs = [
        ("π― Trending Spaces", "Spaces", get_trending_spaces),
        ("π€ Trending Models", "Models", get_models),
        ("π Trending Datasets", "Datasets", get_datasets),
    ]

    with gr.Blocks(title="HuggingFace Trending Board") as interface:
        gr.Markdown("# π€ HuggingFace Trending TOP 300 Board")

        with gr.Tabs():
            for tab_label, noun, fetch_fn in tab_specs:
                with gr.Tab(tab_label):
                    with gr.Row():
                        search_box = gr.Textbox(
                            label=f"Search {noun}",
                            placeholder="Enter search terms..."
                        )
                        sort_radio = gr.Radio(
                            choices=["rank", "rising_rate", "popularity"],
                            value="rank",
                            label="Sort by",
                            interactive=True
                        )
                    refresh_btn = gr.Button("Refresh", variant="primary")
                    gallery = gr.HTML()
                    status = gr.Markdown("Ready")

                    # Refresh button, search edits and sort changes all
                    # trigger the same re-render of this tab's gallery.
                    for register in (refresh_btn.click, search_box.change, sort_radio.change):
                        register(
                            fn=fetch_fn,
                            inputs=[search_box, sort_radio],
                            outputs=[gallery, status]
                        )

    return interface
if __name__ == "__main__":
    # Entry point: build the UI and launch it (share=True exposes a
    # public Gradio link; inbrowser opens a local browser tab).
    try:
        app = create_interface()
        app.launch(share=True, inbrowser=True, show_api=False)
    except Exception as e:
        print(f"Error launching app: {e}")