Upload app.py
app.py
ADDED
@@ -0,0 +1,33 @@
# Inference
from transformers import RobertaTokenizer, RobertaForSequenceClassification
import torch
import streamlit as st

# Load the tokenizer and base model used for transfer learning, then load the fine-tuned weights
tokenizer = RobertaTokenizer.from_pretrained('beomi/KcBERT-v2023')
model = RobertaForSequenceClassification.from_pretrained('beomi/KcBERT-v2023', num_labels=2)
model.load_state_dict(torch.load("pytorchmodel_518망언분류_acc9308.bin", map_location="cpu"))  # map_location lets GPU-trained weights load on a CPU-only host
# Put the model in evaluation mode
model.eval()

# Class labels and inference for the input text
class_labels = ["적절(518망언_NO)", "부적절(518망언_YES)"]  # "appropriate (no 5.18 distortion)" / "inappropriate (5.18 distortion)"
def inference(new_text):
    inputs = tokenizer(new_text, return_tensors="pt")
    # Run inference (on CPU)
    with torch.no_grad():
        outputs = model(**inputs)
    logits = outputs.logits
    probs = torch.nn.functional.softmax(logits, dim=-1)  # class probabilities
    predicted_class = torch.argmax(probs, dim=1).item()  # index of the most likely class
    predicted_label = class_labels[predicted_class]
    probability = probs[0][predicted_class].item()  # probability of the predicted class
    return f"예측: {predicted_label}, 확률: {probability:.4f}"  # "Prediction: ..., probability: ..."

# Streamlit interface
st.title('5·18 민주화운동 관련 부적절한 발언 탐지')  # "Detecting inappropriate remarks about the May 18 Democratization Movement"
# Grey helper text: remarks framing 5·18 as an armed riot, alleging North Korean army involvement or fake honorees,
# and region/ideology hate speech are the targets; an entered sentence is judged for inappropriateness with a probability.
st.markdown('<small style="color:grey;">5·18 민주화운동과 관련해 무장 폭동, 북한군 개입, 가짜 유공자 등 부적절한 언급과 지역-이념에 대한 혐오성 발언이 문제되고 있습니다. 아래에 문장을 입력하면 이러한 내용을 중심으로 문장의 부적절성 여부를 확률과 함께 판단해 드립니다.</small>', unsafe_allow_html=True)
user_input = st.text_area("이 곳에 문장 입력(100자 이하 권장):")  # "Enter a sentence here (100 characters or fewer recommended):"
if st.button('시작'):  # "Start"
    result = inference(user_input)
    st.write(result)
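
The file only exposes the classifier through the Streamlit widgets above. As a quick sanity check outside the browser, a sketch along the following lines could be run from the same directory; it mirrors the same tokenizer-model-softmax pipeline as app.py. The script name and sample sentence are placeholders, and it assumes the fine-tuned weight file from this commit is available locally.

# smoke_test.py - hypothetical helper, not part of this commit; mirrors the pipeline in app.py
import torch
from transformers import RobertaTokenizer, RobertaForSequenceClassification

tokenizer = RobertaTokenizer.from_pretrained('beomi/KcBERT-v2023')
model = RobertaForSequenceClassification.from_pretrained('beomi/KcBERT-v2023', num_labels=2)
model.load_state_dict(torch.load("pytorchmodel_518망언분류_acc9308.bin", map_location="cpu"))
model.eval()

text = "여기에 검사할 문장을 입력하세요."  # placeholder sentence
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    probs = torch.nn.functional.softmax(model(**inputs).logits, dim=-1)
print(probs.tolist())  # e.g. [[p_appropriate, p_inappropriate]]

The UI itself is launched locally with streamlit run app.py.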