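"""HuggingFace Trending Board.

A Gradio app that renders trending HuggingFace Spaces, Models, and Datasets as
styled HTML cards (with Selenium screenshots, an AI Rising Rate star score, and
an AI Popularity Score grade), generates LLM-written news-style summaries of the
top 24 Spaces, and can assemble those summaries into a narrated video with gTTS
and moviepy.
"""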
import os
import random
import base64
import requests
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.common.exceptions import WebDriverException, TimeoutException
from PIL import Image
from io import BytesIO
from datetime import datetime
import gradio as gr
from typing import List, Tuple  # List added
import time
from pathlib import Path
from huggingface_hub import InferenceClient
from dotenv import load_dotenv
from bs4 import BeautifulSoup
from urllib.parse import urljoin
from gtts import gTTS
from moviepy.editor import VideoFileClip, AudioFileClip, ImageClip, concatenate_videoclips
import tempfile
import shutil
import numpy as np
# Load environment variables from a .env file
load_dotenv()
# HuggingFace Inference API client
hf_client = InferenceClient(
"CohereForAI/c4ai-command-r-plus-08-2024",
token=os.getenv("HF_TOKEN")
)
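# NOTE: HF_TOKEN must be available in the environment (e.g., via the .env file)
# for the chat_completion calls and the authenticated raw-file fetches below.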
# μŠ€ν¬λ¦°μƒ· μΊμ‹œ 디렉토리 μ„€μ •
CACHE_DIR = Path("screenshot_cache")
CACHE_DIR.mkdir(exist_ok=True)
# μ „μ—­ λ³€μˆ˜λ‘œ μŠ€ν¬λ¦°μƒ· μΊμ‹œ μ„ μ–Έ
SCREENSHOT_CACHE = {}
def get_cached_screenshot(url: str) -> str:
"""μΊμ‹œλœ μŠ€ν¬λ¦°μƒ· κ°€μ Έμ˜€κΈ° λ˜λŠ” μƒˆλ‘œ 생성"""
try:
# URL을 μ•ˆμ „ν•œ 파일λͺ…μœΌλ‘œ λ³€ν™˜
safe_filename = base64.urlsafe_b64encode(url.encode()).decode()
cache_file = CACHE_DIR / f"{safe_filename[:200]}.jpg" # PNG λŒ€μ‹  JPG μ‚¬μš©
if cache_file.exists():
try:
with Image.open(cache_file) as img:
buffered = BytesIO()
img.save(buffered, format="JPEG", quality=85, optimize=True)
return base64.b64encode(buffered.getvalue()).decode()
except Exception as e:
print(f"Cache read error for {url}: {e}")
if cache_file.exists():
cache_file.unlink()
return take_screenshot(url)
except Exception as e:
print(f"Screenshot cache error for {url}: {e}")
return ""
def take_screenshot(url: str) -> str:
"""μ›Ήμ‚¬μ΄νŠΈ μŠ€ν¬λ¦°μƒ· 촬영"""
if not url.startswith('http'):
url = f"https://{url}"
options = webdriver.ChromeOptions()
options.add_argument('--headless')
options.add_argument('--no-sandbox')
options.add_argument('--disable-dev-shm-usage')
options.add_argument('--window-size=1080,720')
driver = None
try:
driver = webdriver.Chrome(options=options)
driver.get(url)
# νŽ˜μ΄μ§€ λ‘œλ”© λŒ€κΈ°
WebDriverWait(driver, 15).until(
EC.presence_of_element_located((By.TAG_NAME, "body"))
)
        # Give dynamic content extra time to render
        time.sleep(3)
        # Capture and optimize the screenshot
screenshot = driver.get_screenshot_as_png()
img = Image.open(BytesIO(screenshot))
        # Downscale the image
max_size = (800, 600)
img.thumbnail(max_size, Image.Resampling.LANCZOS)
        # Flatten transparency to RGB before saving as JPEG
if img.mode in ('RGBA', 'LA'):
background = Image.new('RGB', img.size, (255, 255, 255))
background.paste(img, mask=img.split()[-1])
img = background
# μΊμ‹œ μ €μž₯
safe_filename = base64.urlsafe_b64encode(url.encode()).decode()
cache_file = CACHE_DIR / f"{safe_filename[:200]}.jpg"
img.save(cache_file, format="JPEG", quality=85, optimize=True)
        # Build the return value
buffered = BytesIO()
img.save(buffered, format="JPEG", quality=85, optimize=True)
return base64.b64encode(buffered.getvalue()).decode()
except Exception as e:
print(f"Screenshot error for {url}: {e}")
return ""
finally:
if driver:
driver.quit()
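# NOTE: take_screenshot assumes a headless Chrome/Chromium binary and a matching
# chromedriver are available on the host (on a Space this is typically installed
# via packages.txt); otherwise webdriver.Chrome() raises and "" is returned.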
def cleanup_cache():
"""μΊμ‹œ 정리"""
try:
current_time = time.time()
for cache_file in CACHE_DIR.glob("*.jpg"):
try:
                # Delete files older than 24 hours or empty (0-byte) files
if (current_time - cache_file.stat().st_mtime > 86400) or cache_file.stat().st_size == 0:
cache_file.unlink()
except Exception as e:
print(f"Error cleaning cache file {cache_file}: {e}")
except Exception as e:
print(f"Cache cleanup error: {e}")
# Clean the cache on app start
cleanup_cache()
def calculate_rising_rate(created_date: str, rank: int) -> int:
"""AI Rising Rate 계산"""
# 생성일 κΈ°μ€€ 점수 계산
created = datetime.strptime(created_date.split('T')[0], '%Y-%m-%d')
today = datetime.now()
days_diff = (today - created).days
date_score = max(0, 300 - days_diff) # μ΅œλŒ€ 300점
# μˆœμœ„ κΈ°μ€€ 점수 계산
rank_score = max(0, 600 - rank) # μ΅œλŒ€ 300점
# 총점 계산
total_score = date_score + rank_score
# 별 개수 계산 (0~5)
if total_score <= 200:
stars = 1
elif total_score <= 400:
stars = 2
elif total_score <= 600:
stars = 3
elif total_score <= 800:
stars = 4
else:
stars = 5
return stars
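# Worked example: a space created 10 days ago at rank 3 scores
# date_score = 300 - 10 = 290 and rank_score = 600 - 3 = 597,
# so total_score = 887 > 800 -> 5 stars.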
def get_popularity_grade(likes: int, stars: int) -> tuple:
"""AI Popularity Score λ“±κΈ‰ 계산"""
# 기본 점수 (likes)
base_score = min(likes, 10000) # μ΅œλŒ€ 10000점
# 별점 μΆ”κ°€ 점수 (별 ν•˜λ‚˜λ‹Ή 500점)
star_score = stars * 1000
# 총점
total_score = base_score + star_score
# λ“±κΈ‰ ν…Œμ΄λΈ” (18단계)
grades = [
(14500, "AAA+"), (14000, "AAA"), (13500, "AAA-"),
(13000, "AA+"), (12500, "AA"), (12000, "AA-"),
(11500, "A+"), (11000, "A"), (10000, "A-"),
(9000, "BBB+"), (8000, "BBB"), (7000, "BBB-"),
(6000, "BB+"), (5000, "BB"), (4000, "BB-"),
(3000, "B+"), (2000, "B"), (1000, "B-")
]
for threshold, grade in grades:
if total_score >= threshold:
return grade, total_score
return "B-", total_score
# Replace the hardware_info part inside get_card with the following:
def get_rating_info(item: dict, index: int) -> str:
"""평가 정보 HTML 생성"""
created = item.get('createdAt', '').split('T')[0]
likes = int(str(item.get('likes', '0')).replace(',', ''))
    # AI Rising Rate
    stars = calculate_rising_rate(created, index + 1)
    star_html = "β˜…" * stars + "β˜†" * (5 - stars)  # filled plus empty stars
    # AI Popularity Score
grade, score = get_popularity_grade(likes, stars)
    # Color per grade family
grade_colors = {
'AAA': '#FFD700', 'AA': '#FFA500', 'A': '#FF4500',
'BBB': '#4169E1', 'BB': '#1E90FF', 'B': '#00BFFF'
}
grade_base = grade.rstrip('+-')
grade_color = grade_colors.get(grade_base, '#666666')
return f"""
<div style='
margin-top: 15px;
padding: 15px;
background: rgba(255,255,255,0.4);
border-radius: 10px;
font-size: 0.9em;
box-shadow: 0 2px 10px rgba(0,0,0,0.1);'>
<div style='
display: grid;
grid-template-columns: repeat(2, 1fr);
gap: 15px;'>
<div style='
color: #333;
display: flex;
flex-direction: column;
gap: 5px;'>
<span style='font-weight: bold;'>AI Rising Rate:</span>
<span style='
color: #FF8C00;
font-size: 1.4em;
letter-spacing: 2px;
text-shadow: 1px 1px 2px rgba(0,0,0,0.1);'>{star_html}</span>
</div>
<div style='
color: #333;
display: flex;
flex-direction: column;
gap: 5px;'>
<span style='font-weight: bold;'>AI Popularity Score:</span>
<span style='
font-size: 1.2em;
font-weight: bold;
color: {grade_color};
text-shadow: 1px 1px 2px rgba(0,0,0,0.1);'>{grade} ({score:,})</span>
</div>
</div>
</div>
"""
def get_hardware_info(item: dict) -> tuple:
"""ν•˜λ“œμ›¨μ–΄ 정보 μΆ”μΆœ"""
try:
        # Runtime metadata
        runtime = item.get('runtime', {})
        # CPU information
        cpu_info = runtime.get('cpu', 'Standard')
        # GPU information
        gpu_info = "None"
if runtime.get('accelerator') == "gpu":
gpu_type = runtime.get('gpu', {}).get('name', '')
gpu_memory = runtime.get('gpu', {}).get('memory', '')
if gpu_type:
gpu_info = f"{gpu_type}"
if gpu_memory:
gpu_info += f" ({gpu_memory}GB)"
        # Check for the spaces GPU decorator
if '@spaces.GPU' in str(item.get('sdk_version', '')):
if gpu_info == "None":
gpu_info = "GPU Enabled"
        # SDK information
        sdk = item.get('sdk', 'N/A')
        print(f"Debug - Runtime Info: {runtime}")  # debug output
        print(f"Debug - GPU Info: {gpu_info}")  # debug output
return cpu_info, gpu_info, sdk
except Exception as e:
print(f"Error parsing hardware info: {str(e)}")
return 'Standard', 'None', 'N/A'
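# NOTE: get_hardware_info is currently not called by get_card below; the cards
# show the AI rating block from get_rating_info instead.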
def get_card(item: dict, index: int, card_type: str = "space") -> str:
"""톡합 μΉ΄λ“œ HTML 생성"""
item_id = item.get('id', '')
author, title = item_id.split('/', 1)
likes = format(item.get('likes', 0), ',')
created = item.get('createdAt', '').split('T')[0]
# short_description κ°€μ Έμ˜€κΈ°
short_description = item.get('cardData', {}).get('short_description', '')
# URL μ •μ˜
if card_type == "space":
url = f"https://huggingface.co/spaces/{item_id}"
elif card_type == "model":
url = f"https://huggingface.co/{item_id}"
else: # dataset
url = f"https://huggingface.co/datasets/{item_id}"
    # Metadata
    tags = item.get('tags', [])
    pipeline_tag = item.get('pipeline_tag', '')
    license = item.get('license', '')
    sdk = item.get('sdk', 'N/A')
    # AI rating info
    rating_info = get_rating_info(item, index)
    # Gradient and background per card type
if card_type == "space":
gradient_colors = """
rgba(255, 182, 193, 0.7), /* νŒŒμŠ€ν…” 핑크 */
rgba(173, 216, 230, 0.7), /* νŒŒμŠ€ν…” 블루 */
rgba(255, 218, 185, 0.7) /* νŒŒμŠ€ν…” ν”ΌμΉ˜ */
"""
bg_content = f"""
background-image: url(data:image/png;base64,{get_cached_screenshot(url) if get_cached_screenshot(url) else ''});
background-size: cover;
background-position: center;
"""
type_icon = "🎯"
type_label = "SPACE"
elif card_type == "model":
gradient_colors = """
        rgba(110, 142, 251, 0.7), /* model blue */
rgba(130, 158, 251, 0.7),
rgba(150, 174, 251, 0.7)
"""
bg_content = f"""
background: linear-gradient(135deg, #6e8efb, #4a6cf7);
padding: 15px;
"""
type_icon = "πŸ€–"
type_label = "MODEL"
else: # dataset
gradient_colors = """
        rgba(255, 107, 107, 0.7), /* dataset red */
rgba(255, 127, 127, 0.7),
rgba(255, 147, 147, 0.7)
"""
bg_content = f"""
background: linear-gradient(135deg, #ff6b6b, #ff8787);
padding: 15px;
"""
type_icon = "πŸ“Š"
type_label = "DATASET"
content_bg = f"""
background: linear-gradient(135deg, {gradient_colors});
backdrop-filter: blur(10px);
"""
    # Tag chips (models and datasets only)
tags_html = ""
if card_type != "space":
tags_html = f"""
<div style='
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
display: flex;
flex-wrap: wrap;
gap: 5px;
justify-content: center;
width: 90%;'>
{' '.join([f'''
<span style='
background: rgba(255,255,255,0.2);
padding: 5px 10px;
border-radius: 15px;
color: white;
font-size: 0.8em;'>
#{tag}
</span>
''' for tag in tags[:5]])}
</div>
"""
    # Return the card HTML
return f"""
<div class="card" style='
position: relative;
border: none;
padding: 0;
margin: 10px;
border-radius: 20px;
box-shadow: 0 10px 20px rgba(0,0,0,0.1);
background: white;
transition: all 0.3s ease;
overflow: hidden;
min-height: 400px;
cursor: pointer;
transform-origin: center;'
onmouseover="this.style.transform='scale(0.98) translateY(5px)'; this.style.boxShadow='0 5px 15px rgba(0,0,0,0.2)';"
onmouseout="this.style.transform='scale(1) translateY(0)'; this.style.boxShadow='0 10px 20px rgba(0,0,0,0.1)';"
onclick="window.open('{url}', '_blank')">
        <!-- Top area -->
<div style='
width: 100%;
height: 200px;
{bg_content}
position: relative;'>
            <!-- Rank badge -->
<div style='
position: absolute;
top: 10px;
left: 10px;
background: rgba(0,0,0,0.7);
color: white;
padding: 5px 15px;
border-radius: 20px;
font-weight: bold;
font-size: 0.9em;
backdrop-filter: blur(5px);'>
#{index + 1}
</div>
            <!-- Type badge -->
<div style='
position: absolute;
top: 10px;
right: 10px;
background: rgba(255,255,255,0.9);
padding: 5px 15px;
border-radius: 20px;
font-weight: bold;
font-size: 0.8em;'>
{type_icon} {type_label}
</div>
{tags_html}
</div>
<!-- μ½˜ν…μΈ  μ˜μ—­ -->
<div style='
padding: 20px;
{content_bg}
border-radius: 0 0 20px 20px;
border-top: 1px solid rgba(255,255,255,0.5);'>
<h3 style='
margin: 0 0 15px 0;
color: #333;
font-size: 1.3em;
line-height: 1.4;
display: -webkit-box;
-webkit-line-clamp: 2;
-webkit-box-orient: vertical;
overflow: hidden;
text-overflow: ellipsis;
text-shadow: 1px 1px 1px rgba(255,255,255,0.8);'>
{title}
</h3>
{f'''
            <!-- Short description (space cards only) -->
<div style='
margin: 0 0 15px 0;
color: #444;
font-size: 0.9em;
line-height: 1.5;
display: -webkit-box;
-webkit-line-clamp: 3;
-webkit-box-orient: vertical;
overflow: hidden;
text-overflow: ellipsis;
background: rgba(255,255,255,0.4);
padding: 10px;
border-radius: 8px;'>
{short_description}
</div>
''' if card_type == "space" and short_description else ''}
<div style='
display: grid;
grid-template-columns: repeat(2, 1fr);
gap: 10px;
font-size: 0.9em;
background: rgba(255,255,255,0.3);
padding: 10px;
border-radius: 10px;'>
<div style='color: #444;'>
<span style='margin-right: 5px;'>πŸ‘€</span> {author}
</div>
<div style='color: #444;'>
<span style='margin-right: 5px;'>❀️</span> {likes}
</div>
<div style='color: #444; grid-column: span 2;'>
<span style='margin-right: 5px;'>πŸ“…</span> {created}
</div>
</div>
{rating_info}
</div>
</div>
"""
def get_trending_spaces(search_query="", sort_by="rank", progress=gr.Progress()) -> Tuple[str, str]:
"""νŠΈλ Œλ”© 슀페이슀 κ°€μ Έμ˜€κΈ°"""
url = "https://huggingface.co/api/spaces"
try:
progress(0, desc="Fetching spaces data...")
params = {
'full': 'true',
'limit': 24
}
response = requests.get(url, params=params)
response.raise_for_status()
spaces = response.json()
        # Filter by search query
if search_query:
spaces = [space for space in spaces if search_query.lower() in
(space.get('id', '') + ' ' + space.get('title', '')).lower()]
        # Sort
sort_by = sort_by.lower()
if sort_by == "rising_rate":
spaces.sort(key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
elif sort_by == "popularity":
spaces.sort(key=lambda x: get_popularity_grade(
int(str(x.get('likes', '0')).replace(',', '')),
calculate_rising_rate(x.get('createdAt', ''), 0))[1],
reverse=True)
progress(0.1, desc="Creating gallery...")
html_content = """
<div style='padding: 20px; background: #f5f5f5;'>
<div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
"""
for idx, space in enumerate(spaces):
html_content += get_card(space, idx, "space")
progress((0.1 + 0.9 * idx/len(spaces)), desc=f"Loading space {idx+1}/{len(spaces)}...")
html_content += "</div></div>"
progress(1.0, desc="Complete!")
return html_content, f"Found {len(spaces)} spaces"
except Exception as e:
error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>'
return error_html, f"Error: {str(e)}"
def get_models(search_query="", sort_by="rank", progress=gr.Progress()) -> Tuple[str, str]:
"""인기 λͺ¨λΈ κ°€μ Έμ˜€κΈ°"""
url = "https://huggingface.co/api/models"
try:
progress(0, desc="Fetching models data...")
params = {
'full': 'true',
'limit': 300
}
response = requests.get(url, params=params)
response.raise_for_status()
models = response.json()
        # Filter by search query
if search_query:
models = [model for model in models if search_query.lower() in
(model.get('id', '') + ' ' + model.get('title', '')).lower()]
        # Sort
sort_by = sort_by.lower()
if sort_by == "rising_rate":
models.sort(key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
elif sort_by == "popularity":
models.sort(key=lambda x: get_popularity_grade(
int(str(x.get('likes', '0')).replace(',', '')),
calculate_rising_rate(x.get('createdAt', ''), 0))[1],
reverse=True)
progress(0.1, desc="Creating gallery...")
html_content = """
<div style='padding: 20px; background: #f5f5f5;'>
<div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
"""
for idx, model in enumerate(models):
html_content += get_card(model, idx, "model")
progress((0.1 + 0.9 * idx/len(models)), desc=f"Loading model {idx+1}/{len(models)}...")
html_content += "</div></div>"
progress(1.0, desc="Complete!")
return html_content, f"Found {len(models)} models"
except Exception as e:
error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>'
return error_html, f"Error: {str(e)}"
def get_datasets(search_query="", sort_by="rank", progress=gr.Progress()) -> Tuple[str, str]:
"""인기 데이터셋 κ°€μ Έμ˜€κΈ°"""
url = "https://huggingface.co/api/datasets"
try:
progress(0, desc="Fetching datasets data...")
params = {
'full': 'true',
'limit': 300
}
response = requests.get(url, params=params)
response.raise_for_status()
datasets = response.json()
        # Filter by search query
if search_query:
datasets = [dataset for dataset in datasets if search_query.lower() in
(dataset.get('id', '') + ' ' + dataset.get('title', '')).lower()]
        # Sort
sort_by = sort_by.lower()
if sort_by == "rising_rate":
datasets.sort(key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
elif sort_by == "popularity":
datasets.sort(key=lambda x: get_popularity_grade(
int(str(x.get('likes', '0')).replace(',', '')),
calculate_rising_rate(x.get('createdAt', ''), 0))[1],
reverse=True)
progress(0.1, desc="Creating gallery...")
html_content = """
<div style='padding: 20px; background: #f5f5f5;'>
<div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
"""
for idx, dataset in enumerate(datasets):
html_content += get_card(dataset, idx, "dataset")
progress((0.1 + 0.9 * idx/len(datasets)), desc=f"Loading dataset {idx+1}/{len(datasets)}...")
html_content += "</div></div>"
progress(1.0, desc="Complete!")
return html_content, f"Found {len(datasets)} datasets"
except Exception as e:
error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>'
return error_html, f"Error: {str(e)}"
# Sorting helper
def sort_items(items, sort_by):
if sort_by == "rank":
        return items  # already ordered by rank
elif sort_by == "rising_rate":
return sorted(items, key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
elif sort_by == "popularity":
return sorted(items, key=lambda x: get_popularity_grade(int(str(x.get('likes', '0')).replace(',', '')),
calculate_rising_rate(x.get('createdAt', ''), 0))[1], reverse=True)
return items
# Generic API fetch helper
def fetch_items(item_type, search_query="", sort_by="rank", limit=1000):
"""μ•„μ΄ν…œ κ°€μ Έμ˜€κΈ° (spaces/models/datasets)"""
base_url = f"https://huggingface.co/api/{item_type}"
params = {
'full': 'true',
'limit': limit,
'search': search_query
}
try:
response = requests.get(base_url, params=params)
response.raise_for_status()
items = response.json()
        # Filter by search query
if search_query:
items = [item for item in items if search_query.lower() in
(item.get('id', '') + item.get('title', '')).lower()]
        # Sort
        items = sort_items(items, sort_by)
        return items[:300]  # return at most the top 300
except Exception as e:
print(f"Error fetching items: {e}")
return []
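# fetch_items/sort_items are generic helpers; the tab handlers above call the
# type-specific functions directly. Usage sketch:
#   top_models = fetch_items("models", search_query="llama", sort_by="popularity")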
def get_space_source(space_id: str) -> dict:
"""슀페이슀의 μ†ŒμŠ€μ½”λ“œ κ°€μ Έμ˜€κΈ°"""
try:
# HuggingFace APIλ₯Ό 톡해 μ†ŒμŠ€μ½”λ“œ κ°€μ Έμ˜€κΈ°
headers = {"Authorization": f"Bearer {os.getenv('HF_TOKEN')}"}
# app.py μ‹œλ„
app_url = f"https://huggingface.co/spaces/{space_id}/raw/main/app.py"
app_response = requests.get(app_url, headers=headers)
        # Try index.html
index_url = f"https://huggingface.co/spaces/{space_id}/raw/main/index.html"
index_response = requests.get(index_url, headers=headers)
source = {
"app.py": app_response.text if app_response.status_code == 200 else "",
"index.html": index_response.text if index_response.status_code == 200 else ""
}
return source
except Exception as e:
print(f"Error fetching source for {space_id}: {str(e)}")
return {"app.py": "", "index.html": ""}
def analyze_space(space_info: dict) -> str:
"""슀페이슀 뢄석"""
try:
space_id = space_info.get('id', '')
url = f"https://huggingface.co/spaces/{space_id}"
        # Fetch the source code
source = get_space_source(space_id)
source_code = source["app.py"] or source["index.html"]
if not source_code:
return f"""
<div style='
padding: 20px;
color: #333 !important;
background: white !important;
'>
<h3 style='color: #333 !important; margin-bottom: 10px;'>
#{space_info.get('rank', '0')} {space_id}
</h3>
<p style='color: red;'>μ†ŒμŠ€μ½”λ“œλ₯Ό κ°€μ Έμ˜¬ 수 μ—†μŠ΅λ‹ˆλ‹€.</p>
</div>
"""
        # LLM analysis prompt
prompt = f"""
λ‹€μŒμ€ HuggingFace 슀페이슀({url})의 μ½”λ“œ λ˜λŠ” μ£Όμ„μž…λ‹ˆλ‹€:
```
{source_code[:4000]}
```
이 λ‚΄μš©μ„ 기반으둜 λ‹€μŒ ν•­λͺ©μ„ λΆ„μ„ν•΄μ£Όμ„Έμš”:
1. κ°œμš”: (ν•œ μ€„λ‘œ)
2. μš”μ•½: (ν•œ μ€„λ‘œ)
3. νŠΉμ§• 및 μž₯점: (ν•œ μ€„λ‘œ)
4. μ‚¬μš© λŒ€μƒ: (ν•œ μ€„λ‘œ)
5. μ‚¬μš© 방법: (ν•œ μ€„λ‘œ)
6. μœ μ‚¬ μ„œλΉ„μŠ€μ™€μ˜ 차별점: (ν•œ μ€„λ‘œ)
각 ν•­λͺ©μ€ μ‹€μ œ ν™•μΈλœ λ‚΄μš©λ§Œ ν¬ν•¨ν•˜μ—¬ ν•œ μ€„λ‘œ μž‘μ„±ν•˜μ„Έμš”.
μ½”λ“œκ°€ λ³΄μ•ˆμ²˜λ¦¬λœ 경우 주석을 기반으둜 λΆ„μ„ν•˜μ„Έμš”.
"""
        # Run the LLM analysis
messages = [
{"role": "system", "content": "μ†ŒμŠ€μ½”λ“œ 뢄석 μ „λ¬Έκ°€λ‘œμ„œ μ‹€μ œ μ½”λ“œ λ‚΄μš©λ§Œ 기반으둜 λΆ„μ„ν•˜μ„Έμš”."},
{"role": "user", "content": prompt}
]
response = hf_client.chat_completion(
messages,
max_tokens=3800,
temperature=0.3
)
analysis = response.choices[0].message.content
return f"""
<div style='
padding: 20px;
color: #333 !important;
background: white !important;
'>
<h3 style='color: #333 !important; margin-bottom: 15px;'>
#{space_info.get('rank', '0')} {space_id}
</h3>
<div style='color: #444 !important; font-size: 0.95em;'>
{analysis}
</div>
<div style='margin-top: 15px;'>
<details>
<summary style='cursor: pointer; color: #666 !important;'>
μ†ŒμŠ€μ½”λ“œ 미리보기
</summary>
<pre style='
background: #f5f5f5 !important;
padding: 10px;
border-radius: 5px;
font-size: 0.8em;
margin-top: 10px;
white-space: pre-wrap;
word-break: break-all;
color: #333 !important;
'>{source_code[:500]}...</pre>
</details>
</div>
</div>
"""
except Exception as e:
return f"<div style='color: red !important; padding: 20px;'>뢄석 였λ₯˜: {str(e)}</div>"
def analyze_top_spaces(progress=gr.Progress()) -> Tuple[str, str]:
"""μƒμœ„ 24개 슀페이슀 뢄석"""
try:
progress(0, desc="슀페이슀 데이터 κ°€μ Έμ˜€λŠ” 쀑...")
url = "https://huggingface.co/api/spaces"
response = requests.get(url, params={'full': 'true', 'limit': 24})
response.raise_for_status()
spaces = response.json()[:24]
        # Start of the HTML: an editable intro textarea plus card styles
html_content = """
<div style='padding: 20px; background: #ffffff;'>
<div style='margin-bottom: 30px;'>
<textarea id='intro_text' rows='4' style='
width: 100%;
padding: 15px;
border: 1px solid #ddd;
border-radius: 10px;
font-size: 1.1em;
line-height: 1.5;
resize: vertical;
background: #f8f9fa;
'>μ•ˆλ…•ν•˜μ„Έμš”. 맀일 κΈ€λ‘œλ²Œ μ΅œμ‹  AI 인기 νŠΈλ Œλ“œ μ„œλΉ„μŠ€λ₯Ό μ•Œμ•„λ³΄λŠ” '데일리 AI νŠΈλ Œλ”©' λ‰΄μŠ€μž…λ‹ˆλ‹€. 였늘의 ν—ˆκΉ…νŽ˜μ΄μŠ€ 인기 μˆœμœ„ 1μœ„λΆ€ν„° 24μœ„κΉŒμ§€, 뢄석과 핡심 λ‚΄μš©μ„ μ‚΄νŽ΄λ³΄κ² μŠ΅λ‹ˆλ‹€.</textarea>
</div>
<style>
.script-card {
background: white !important;
border-radius: 10px;
padding: 20px;
margin-bottom: 20px;
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
border: 1px solid #e0e0e0;
}
.script-content {
color: #444 !important;
font-size: 1.1em;
line-height: 1.6;
white-space: pre-line;
}
</style>
"""
for idx, space in enumerate(spaces):
progress((idx + 1) / 24, desc=f"뢄석 쀑... {idx+1}/24")
try:
source = get_space_source(space['id'])
source_code = source["app.py"] or source["index.html"]
                # Strip the user name from the space id, keeping only the project name
project_name = space['id'].split('/')[-1]
prompt = f"""
λ‹€μŒ HuggingFace 슀페이슀λ₯Ό 유튜브 λ‰΄μŠ€ 리포트 ν˜•μ‹μœΌλ‘œ μ„€λͺ…ν•΄μ£Όμ„Έμš”.
μ‹œμž‘μ€ λ°˜λ“œμ‹œ "였늘의 μΈκΈ°μˆœμœ„ {idx + 1}μœ„μΈ {project_name}μž…λ‹ˆλ‹€."둜 μ‹œμž‘ν•˜κ³ ,
μ΄μ–΄μ„œ μ£Όμš” κΈ°λŠ₯, νŠΉμ§•, ν™œμš©λ°©μ•ˆμ„ 2-3λ¬Έμž₯으둜 μžμ—°μŠ€λŸ½κ²Œ μ„€λͺ…ν•΄μ£Όμ„Έμš”.
전체 κΈΈμ΄λŠ” 3-4λ¬Έμž₯으둜 μ œν•œν•˜κ³ , μ„€λͺ…은 λ‰΄μŠ€ λ¦¬ν¬ν„°μ²˜λŸΌ λͺ…ν™•ν•˜κ³  μ „λ¬Έμ μœΌλ‘œ ν•΄μ£Όμ„Έμš”.
μ†ŒμŠ€μ½”λ“œ:
```
{source_code[:1500]}
```
"""
messages = [
{"role": "system", "content": "AI 기술 μ „λ¬Έ λ‰΄μŠ€ λ¦¬ν¬ν„°μž…λ‹ˆλ‹€."},
{"role": "user", "content": prompt}
]
response = hf_client.chat_completion(
messages,
max_tokens=200,
temperature=0.7
)
script = response.choices[0].message.content.strip()
html_content += f"""
<div class='script-card'>
<div class='script-content'>{script}</div>
</div>
"""
except Exception as e:
print(f"Error analyzing space {space['id']}: {e}")
html_content += f"""
<div class='script-card'>
<div class='script-content' style='color: red !important;'>
μˆœμœ„ {idx + 1}μœ„ 뢄석 쀑 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€.
</div>
</div>
"""
html_content += "</div>"
return html_content, f"24개 슀페이슀 뢄석 μ™„λ£Œ"
except Exception as e:
error_msg = f"Error: {str(e)}"
return f"<div style='color: red; padding: 20px;'>{error_msg}</div>", error_msg
def analyze_single_space(space: dict, source_code: str) -> str:
"""단일 슀페이슀 뢄석"""
try:
if not source_code:
return "μ†ŒμŠ€μ½”λ“œλ₯Ό κ°€μ Έμ˜¬ 수 μ—†μŠ΅λ‹ˆλ‹€."
prompt = f"""
λ‹€μŒ 슀페이슀의 μ†ŒμŠ€μ½”λ“œλ₯Ό λΆ„μ„ν•΄μ£Όμ„Έμš”:
```
{source_code[:4000]}
```
λ‹€μŒ ν•­λͺ©μ„ 각각 ν•œ μ€„λ‘œ μš”μ•½ν•΄μ£Όμ„Έμš”:
1. κ°œμš”:
2. μš”μ•½:
3. νŠΉμ§• 및 μž₯점:
4. μ‚¬μš© λŒ€μƒ:
5. μ‚¬μš© 방법:
6. μœ μ‚¬ μ„œλΉ„μŠ€μ™€μ˜ 차별점:
"""
messages = [
{"role": "system", "content": "μ†ŒμŠ€μ½”λ“œ 뢄석 μ „λ¬Έκ°€μž…λ‹ˆλ‹€."},
{"role": "user", "content": prompt}
]
response = hf_client.chat_completion(
messages,
max_tokens=3800,
temperature=0.3
)
return response.choices[0].message.content
except Exception as e:
return f"뢄석 쀑 였λ₯˜ λ°œμƒ: {str(e)}"
def create_editable_space_analysis(progress=gr.Progress()) -> List[str]:
"""24개 슀페이슀 뢄석 ν…μŠ€νŠΈ 생성"""
try:
progress(0, desc="슀페이슀 데이터 κ°€μ Έμ˜€λŠ” 쀑...")
url = "https://huggingface.co/api/spaces"
response = requests.get(url, params={'full': 'true', 'limit': 24})
response.raise_for_status()
spaces = response.json()[:24]
analysis_texts = []
for idx, space in enumerate(spaces):
progress((idx + 1) / 24, desc=f"뢄석 쀑... {idx+1}/24")
try:
source = get_space_source(space['id'])
source_code = source["app.py"] or source["index.html"]
# ν”„λ‘œμ νŠΈλͺ…λ§Œ μΆ”μΆœ
project_name = space['id'].split('/')[-1]
prompt = f"""
λ‹€μŒ HuggingFace 슀페이슀λ₯Ό λΆ„μ„ν•˜μ—¬ λ‰΄μŠ€ 리포트 ν˜•μ‹μœΌλ‘œ μ„€λͺ…ν•΄μ£Όμ„Έμš”:
μ‹œμž‘μ€ λ°˜λ“œμ‹œ "였늘의 μΈκΈ°μˆœμœ„ {idx + 1}μœ„μΈ {project_name}μž…λ‹ˆλ‹€."둜 μ‹œμž‘ν•˜κ³ ,
μ΄μ–΄μ„œ μ£Όμš” κΈ°λŠ₯, νŠΉμ§•, ν™œμš©λ°©μ•ˆμ„ μžμ—°μŠ€λŸ½κ²Œ μ„€λͺ…ν•΄μ£Όμ„Έμš”.
μ†ŒμŠ€μ½”λ“œ:
```
{source_code[:1500]}
```
"""
messages = [
{"role": "system", "content": "AI 기술 μ „λ¬Έ λ‰΄μŠ€ λ¦¬ν¬ν„°μž…λ‹ˆλ‹€."},
{"role": "user", "content": prompt}
]
response = hf_client.chat_completion(
messages,
max_tokens=200,
temperature=0.7
)
analysis_texts.append(response.choices[0].message.content.strip())
            except Exception as e:
                # project_name may not be set yet if the failure happened early
                project_name = space['id'].split('/')[-1]
                analysis_texts.append(f"였늘의 μΈκΈ°μˆœμœ„ {idx + 1}μœ„μΈ {project_name}μž…λ‹ˆλ‹€.")
return analysis_texts
except Exception as e:
return [f"μˆœμœ„ {i+1}μœ„ 뢄석을 μ€€λΉ„μ€‘μž…λ‹ˆλ‹€." for i in range(24)]
def generate_video(texts: List[str], progress=gr.Progress()) -> str:
"""μ˜μƒ 생성"""
try:
temp_dir = tempfile.mkdtemp()
clips = []
# 인트둜 생성
intro_image = Image.open('intro.png')
intro_audio = gTTS(text=texts[0], lang='ko', slow=False)
intro_audio.save(f"{temp_dir}/intro.mp3")
intro_clip = ImageClip(np.array(intro_image)).set_duration(5) # 5초 λ˜λŠ” μ˜€λ””μ˜€ 길이에 맞게 μ‘°μ •
intro_audio_clip = AudioFileClip(f"{temp_dir}/intro.mp3")
intro_clip = intro_clip.set_audio(intro_audio_clip)
clips.append(intro_clip)
# 각 μŠ€νŽ˜μ΄μŠ€λ³„ 클립 생성
for idx, text in enumerate(texts[1:], 1):
progress((idx / 24), desc=f"μ˜μƒ 생성 쀑... {idx}/24")
# μŠ€ν¬λ¦°μƒ· 캑처
space_id = spaces[idx-1]['id']
url = f"https://huggingface.co/spaces/{space_id}"
screenshot = get_cached_screenshot(url)
# 이미지 클립 생성
image = Image.open(BytesIO(base64.b64decode(screenshot)))
image_clip = ImageClip(np.array(image)).set_duration(5) # 5초 λ˜λŠ” μ˜€λ””μ˜€ 길이에 맞게 μ‘°μ •
# μŒμ„± 생성
tts = gTTS(text=text, lang='ko', slow=False)
tts.save(f"{temp_dir}/audio_{idx}.mp3")
audio_clip = AudioFileClip(f"{temp_dir}/audio_{idx}.mp3")
# 이미지와 μŒμ„± κ²°ν•©
video_clip = image_clip.set_audio(audio_clip)
clips.append(video_clip)
# λͺ¨λ“  클립 μ—°κ²°
final_clip = concatenate_videoclips(clips)
# MP4둜 μ €μž₯
output_path = "output_video.mp4"
final_clip.write_videofile(output_path, fps=24, codec='libx264')
# μž„μ‹œ 파일 정리
shutil.rmtree(temp_dir)
return output_path
except Exception as e:
print(f"Error generating video: {e}")
if temp_dir:
shutil.rmtree(temp_dir)
return ""
def create_interface():
with gr.Blocks(title="HuggingFace Trending Board", css="""
.search-sort-container {
background: linear-gradient(135deg, rgba(255,255,255,0.95), rgba(240,240,255,0.95));
border-radius: 15px;
padding: 20px;
margin: 10px 0;
box-shadow: 0 4px 6px rgba(0,0,0,0.1);
overflow: visible;
}
.search-box {
border: 2px solid #e1e1e1;
border-radius: 10px;
padding: 12px;
transition: all 0.3s ease;
background: linear-gradient(135deg, #ffffff, #f8f9ff);
width: 100%;
}
.search-box:focus {
border-color: #7b61ff;
box-shadow: 0 0 0 2px rgba(123,97,255,0.2);
background: linear-gradient(135deg, #ffffff, #f0f3ff);
}
.refresh-btn {
background: linear-gradient(135deg, #7b61ff, #6366f1);
color: white;
border: none;
padding: 10px 20px;
border-radius: 10px;
cursor: pointer;
transition: all 0.3s ease;
width: 120px;
height: 80px !important;
display: flex;
align-items: center;
justify-content: center;
margin-left: auto;
font-size: 1.2em !important;
box-shadow: 0 4px 6px rgba(0,0,0,0.1);
}
.refresh-btn:hover {
transform: translateY(-2px);
box-shadow: 0 6px 12px rgba(0,0,0,0.2);
background: linear-gradient(135deg, #8b71ff, #7376f1);
}
""") as interface:
gr.Markdown("""
# πŸ€— HuggingFace Trending Board
<div style='margin-bottom: 20px; padding: 10px; background: linear-gradient(135deg, rgba(123,97,255,0.1), rgba(99,102,241,0.1)); border-radius: 10px;'>
            Explore, search, and sort the top trending Spaces, Models, and Datasets with AI Ratings
</div>
""")
with gr.Tabs() as tabs:
            # Spaces tab
with gr.Tab("🎯 Trending Spaces"):
with gr.Row(elem_classes="search-sort-container"):
with gr.Column(scale=2):
spaces_search = gr.Textbox(
label="πŸ” Search Spaces",
placeholder="Enter keywords to search...",
elem_classes="search-box"
)
with gr.Column(scale=2):
spaces_sort = gr.Radio(
choices=["rank", "rising_rate", "popularity"],
value="rank",
label="Sort by",
interactive=True
)
with gr.Column(scale=1):
spaces_refresh_btn = gr.Button(
"πŸ”„ Refresh",
variant="primary",
elem_classes="refresh-btn"
)
spaces_gallery = gr.HTML()
spaces_status = gr.Markdown("Loading...")
            # Models tab
with gr.Tab("πŸ€– Trending Models"):
with gr.Row(elem_classes="search-sort-container"):
with gr.Column(scale=2):
models_search = gr.Textbox(
label="πŸ” Search Models",
placeholder="Enter keywords to search...",
elem_classes="search-box"
)
with gr.Column(scale=2):
models_sort = gr.Radio(
choices=["rank", "rising_rate", "popularity"],
value="rank",
label="Sort by",
interactive=True
)
with gr.Column(scale=1):
models_refresh_btn = gr.Button(
"πŸ”„ Refresh",
variant="primary",
elem_classes="refresh-btn"
)
models_gallery = gr.HTML()
models_status = gr.Markdown("Loading...")
            # Datasets tab
with gr.Tab("πŸ“Š Trending Datasets"):
with gr.Row(elem_classes="search-sort-container"):
with gr.Column(scale=2):
datasets_search = gr.Textbox(
label="πŸ” Search Datasets",
placeholder="Enter keywords to search...",
elem_classes="search-box"
)
with gr.Column(scale=2):
datasets_sort = gr.Radio(
choices=["rank", "rising_rate", "popularity"],
value="rank",
label="Sort by",
interactive=True
)
with gr.Column(scale=1):
datasets_refresh_btn = gr.Button(
"πŸ”„ Refresh",
variant="primary",
elem_classes="refresh-btn"
)
datasets_gallery = gr.HTML()
datasets_status = gr.Markdown("Loading...")
            # Analysis tab
with gr.Tab("πŸ” Top 24 Spaces Analysis"):
with gr.Row(elem_classes="search-sort-container"):
analysis_refresh_btn = gr.Button(
"πŸ”„ Analyze All 24 Spaces",
variant="primary",
elem_classes="refresh-btn"
)
                # Editable intro text
intro_text = gr.Textbox(
value="μ•ˆλ…•ν•˜μ„Έμš”. 맀일 κΈ€λ‘œλ²Œ μ΅œμ‹  AI 인기 νŠΈλ Œλ“œ μ„œλΉ„μŠ€λ₯Ό μ•Œμ•„λ³΄λŠ” '데일리 AI νŠΈλ Œλ”©' λ‰΄μŠ€μž…λ‹ˆλ‹€. 였늘의 ν—ˆκΉ…νŽ˜μ΄μŠ€ 인기 μˆœμœ„ 1μœ„λΆ€ν„° 24μœ„κΉŒμ§€, 뢄석과 핡심 λ‚΄μš©μ„ μ‚΄νŽ΄λ³΄κ² μŠ΅λ‹ˆλ‹€.",
label="인트둜 ν…μŠ€νŠΈ",
lines=4
)
                # Container for the 24 editable analysis text boxes
with gr.Column(elem_id="analysis-container"):
analysis_boxes = [gr.Textbox(label=f"Space #{i+1}", lines=3) for i in range(24)]
analysis_status = gr.Markdown()
                # Video generation section
with gr.Row():
generate_btn = gr.Button(
"🎬 μ˜μƒ 생성",
variant="primary",
size="lg"
)
video_output = gr.Video(label="μƒμ„±λœ μ˜μƒ")
# Event handlers
spaces_refresh_btn.click(
fn=get_trending_spaces,
inputs=[spaces_search, spaces_sort],
outputs=[spaces_gallery, spaces_status]
)
models_refresh_btn.click(
fn=get_models,
inputs=[models_search, models_sort],
outputs=[models_gallery, models_status]
)
datasets_refresh_btn.click(
fn=get_datasets,
inputs=[datasets_search, datasets_sort],
outputs=[datasets_gallery, datasets_status]
)
        # Auto-refresh when the search query changes
spaces_search.change(
fn=get_trending_spaces,
inputs=[spaces_search, spaces_sort],
outputs=[spaces_gallery, spaces_status]
)
models_search.change(
fn=get_models,
inputs=[models_search, models_sort],
outputs=[models_gallery, models_status]
)
datasets_search.change(
fn=get_datasets,
inputs=[datasets_search, datasets_sort],
outputs=[datasets_gallery, datasets_status]
)
        # Auto-refresh when the sort option changes
spaces_sort.change(
fn=get_trending_spaces,
inputs=[spaces_search, spaces_sort],
outputs=[spaces_gallery, spaces_status]
)
models_sort.change(
fn=get_models,
inputs=[models_search, models_sort],
outputs=[models_gallery, models_status]
)
datasets_sort.change(
fn=get_datasets,
inputs=[datasets_search, datasets_sort],
outputs=[datasets_gallery, datasets_status]
)
        # Analysis tab event handlers
analysis_refresh_btn.click(
fn=on_analyze,
outputs=analysis_boxes
)
generate_btn.click(
fn=on_generate_video,
inputs=[intro_text] + analysis_boxes,
outputs=video_output
)
        # Load initial data
interface.load(
fn=get_trending_spaces,
inputs=[spaces_search, spaces_sort],
outputs=[spaces_gallery, spaces_status]
)
interface.load(
fn=get_models,
inputs=[models_search, models_sort],
outputs=[models_gallery, models_status]
)
interface.load(
fn=get_datasets,
inputs=[datasets_search, datasets_sort],
outputs=[datasets_gallery, datasets_status]
)
interface.load(
fn=on_analyze,
outputs=analysis_boxes
)
return interface
# Analysis and video-generation callbacks
def on_analyze(progress=gr.Progress()):
"""뢄석 μ‹€ν–‰ 및 ν…μŠ€νŠΈλ°•μŠ€ μ—…λ°μ΄νŠΈ"""
try:
url = "https://huggingface.co/api/spaces"
response = requests.get(url, params={'full': 'true', 'limit': 24})
response.raise_for_status()
spaces = response.json()[:24]
analysis_texts = []
for idx, space in enumerate(spaces):
progress((idx + 1) / 24, desc=f"뢄석 쀑... {idx+1}/24")
try:
source = get_space_source(space['id'])
source_code = source["app.py"] or source["index.html"]
project_name = space['id'].split('/')[-1]
prompt = f"""
λ‹€μŒ HuggingFace 슀페이슀λ₯Ό λΆ„μ„ν•˜μ—¬ λ‰΄μŠ€ 리포트 ν˜•μ‹μœΌλ‘œ μ„€λͺ…ν•΄μ£Όμ„Έμš”:
μ‹œμž‘μ€ λ°˜λ“œμ‹œ "였늘의 μΈκΈ°μˆœμœ„ {idx + 1}μœ„μΈ {project_name}μž…λ‹ˆλ‹€."둜 μ‹œμž‘ν•˜κ³ ,
μ΄μ–΄μ„œ μ£Όμš” κΈ°λŠ₯, νŠΉμ§•, ν™œμš©λ°©μ•ˆμ„ μžμ—°μŠ€λŸ½κ²Œ μ„€λͺ…ν•΄μ£Όμ„Έμš”.
μ†ŒμŠ€μ½”λ“œ:
```
{source_code[:1500]}
```
"""
messages = [
{"role": "system", "content": "AI 기술 μ „λ¬Έ λ‰΄μŠ€ λ¦¬ν¬ν„°μž…λ‹ˆλ‹€."},
{"role": "user", "content": prompt}
]
response = hf_client.chat_completion(
messages,
max_tokens=200,
temperature=0.7
)
analysis_texts.append(response.choices[0].message.content.strip())
            except Exception as e:
                # project_name may not be set yet if the failure happened early
                project_name = space['id'].split('/')[-1]
                analysis_texts.append(f"였늘의 μΈκΈ°μˆœμœ„ {idx + 1}μœ„μΈ {project_name}μž…λ‹ˆλ‹€.")
        # Ensure exactly 24 values are returned
if len(analysis_texts) < 24:
analysis_texts.extend([f"μˆœμœ„ {i+1}μœ„ 뢄석을 μ€€λΉ„μ€‘μž…λ‹ˆλ‹€." for i in range(len(analysis_texts), 24)])
return analysis_texts[:24]
except Exception as e:
return [f"μˆœμœ„ {i+1}μœ„ 뢄석을 μ€€λΉ„μ€‘μž…λ‹ˆλ‹€." for i in range(24)]
def on_generate_video(intro, *texts, progress=gr.Progress()):
"""μ˜μƒ 생성"""
all_texts = [intro] + list(texts)
return generate_video(all_texts, progress)
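# on_generate_video receives the intro textbox value followed by the 24 analysis
# textbox values (in the order registered in create_interface) and forwards them
# to generate_video as a single list.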
if __name__ == "__main__":
try:
CACHE_DIR.mkdir(exist_ok=True)
cleanup_cache()
demo = create_interface()
demo.launch(
share=True,
inbrowser=True,
show_api=False,
max_threads=4
)
except Exception as e:
print(f"Application error: {e}")