diary2 / app.py
from transformers import GPT2LMHeadModel, GPT2Tokenizer
def generate_diary(emotion, num_samples=1, max_length=100, temperature=0.7):
    # Load the tokenizer and model used to generate a diary entry from the emotion
    tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
    model = GPT2LMHeadModel.from_pretrained("gpt2")

    # Build a prefix sentence based on the emotion
    if emotion == "happy":
        prefix = "I am in a good mood today. "
    elif emotion == "sad":
        prefix = "I am feeling sad. "
    elif emotion == "angry":
        prefix = "I am feeling very angry. "
    else:
        prefix = "I am feeling strange today. "

    # Tokenize the prefix to build the input sequence
    input_sequence = tokenizer.encode(prefix, return_tensors="pt")

    # Generate text with the model; sampling is enabled so that temperature
    # takes effect and multiple sequences can be returned
    output = model.generate(
        input_sequence,
        max_length=max_length,
        num_return_sequences=num_samples,
        do_sample=True,
        temperature=temperature,
        pad_token_id=tokenizer.eos_token_id
    )

    # Return the generated diary entries
    return [tokenizer.decode(output_sequence, skip_special_tokens=True) for output_sequence in output]

def main():
    # Read today's emotion from the user
    emotion = input("Enter today's emotion (happy, sad, angry, etc.): ")

    # Generate the diary entries
    diary_entries = generate_diary(emotion)

    # Print the generated entries
    print("Today's diary:")
    for i, entry in enumerate(diary_entries, start=1):
        print(f"{i}. {entry}")


if __name__ == "__main__":
    main()
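
# Usage sketch: besides running this script directly, generate_diary can be
# imported and called from another script, for example:
#
#   from app import generate_diary
#   entries = generate_diary("happy", num_samples=2, max_length=80)
#   for entry in entries:
#       print(entry)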