Spaces:
Sleeping
Sleeping
File size: 1,903 Bytes
49a2293 47e2f84 49a2293 fd4767e e041890 885048d c5bd5f7 e041890 885048d e041890 47e2f84 c5bd5f7 e041890 c5bd5f7 e041890 c5bd5f7 e041890 c5bd5f7 e041890 885048d e041890 49a2293 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 |
import gradio as gr
from transformers import pipeline
# Load the text-generation model once at startup so the request handler can
# reuse it. If loading fails (no network, missing weights, OOM, ...), keep
# `generator = None` and remember the error text so the UI can report it
# instead of crashing the whole Space.
try:
    # Small model that fits a CPU Space; swap the model id if needed.
    generator = pipeline("text-generation", model="bigscience/bloomz-560m")
except Exception as e:  # broad catch is deliberate at this startup boundary
    generator = None
    error_message = f"λͺ¨λΈ λ‘λ μ€λ₯: {str(e)}"
# Response-generation handler wired to the Gradio interface.
def generate_reply(review):
    """Generate a polite customer-service reply for a customer review.

    Uses the module-level ``generator`` pipeline loaded at startup.  All
    failures are returned as user-facing strings rather than raised, so the
    Gradio UI never shows a traceback.

    Args:
        review: The customer review text entered in the UI.

    Returns:
        A string containing the generated reply plus debug info, or an
        error message when the model is unavailable or the call fails.
    """
    # Model failed to load at startup: surface the stored error message.
    if not generator:
        return f"λͺ¨λΈμ λ‘λν μ μμ΅λλ€. μ€λ₯: {error_message}"

    # Korean instruction prompt with one example reply (few-shot style).
    prompt = f"""
λ€μμ κ³ κ° λ¦¬λ·°μλλ€:
리뷰: "{review}"
μ΄ λ¦¬λ·°μ λν΄ μ μ€νκ³ κ°μ¬μ λ»μ λ΄μ κ³ κ° μλΉμ€ νμ λ΅λ³μ μμ±νμΈμ.
λ΅λ³ μ: "κ³ κ°λμ μμ€ν μ견 κ°μ¬ν©λλ€. μμΌλ‘λ λ λμ μλΉμ€λ₯Ό μ 곡νκΈ° μν΄ λΈλ ₯νκ² μ΅λλ€."
"""
    try:
        # Sampled decoding keeps replies varied; 50 new tokens suffices for
        # a short service reply.
        result = generator(prompt, max_new_tokens=50, do_sample=True, temperature=0.7)
        # Debug output shown to the user alongside the reply.
        debug_info = f"μλ ₯λ ν둬ννΈ: {prompt}\nλͺ¨λΈ μλ΅: {result}"
        if result and "generated_text" in result[0]:
            generated_text = result[0]["generated_text"]
            # The pipeline echoes the prompt at the start of the output.
            # Strip only that leading prefix — the original `replace(prompt,
            # "")` would also delete any later occurrence inside the reply.
            if generated_text.startswith(prompt):
                generated_text = generated_text[len(prompt):].strip()
            return f"μμ±λ λ΅λ³: {generated_text}\n\n[λλ²κΉμ 보]\n{debug_info}"
        else:
            return f"μλ΅ μ²λ¦¬ μ€ λ¬Έμ κ° λ°μνμ΅λλ€.\n\n[λλ²κΉμ 보]\n{debug_info}"
    except Exception as e:
        # Keep the UI alive on any inference failure.
        return f"API νΈμΆ μ€ μ€λ₯κ° λ°μνμ΅λλ€: {str(e)}"
# Gradio interface: one text input (the review) mapped to one text output
# (the generated reply) via generate_reply.
iface = gr.Interface(
    fn=generate_reply,
    inputs="text",
    outputs="text",
    title="Review Reply Generator",
    description="κ³ κ° λ¦¬λ·°λ₯Ό μλ ₯νλ©΄ μ μ€ν λ΅λ³μ μμ±ν©λλ€.",
)

# Start the Space's web server.
iface.launch()
|