import streamlit as st
import torch
from transformers import BertConfig, BertForSequenceClassification, BertTokenizer
import numpy as np
# Load the model and tokenizer
def load_model():
    tokenizer = BertTokenizer.from_pretrained('beomi/kcbert-base')
    config = BertConfig.from_pretrained('beomi/kcbert-base', num_labels=7)
    model = BertForSequenceClassification.from_pretrained('beomi/kcbert-base', config=config)
    # Load the fine-tuned 7-class sentiment weights on CPU
    model_state_dict = torch.load('sentiment7_model_acc8878.pth', map_location=torch.device('cpu'))
    model.load_state_dict(model_state_dict)
    model.eval()
    return model, tokenizer
model, tokenizer = load_model()
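# Possible refinement (an assumption, not part of the original script): Streamlit reruns
# the whole file on every widget interaction, so load_model() runs on each rerun. If the
# Space's Streamlit version is recent enough to provide st.cache_resource (>= 1.18), the
# loader could be cached so the KcBERT weights are only read once, e.g.:
#
#   @st.cache_resource
#   def load_model():
#       ...  # same body as above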
# Define the inference function
def inference(input_doc):
    # Tokenize the input text (truncate overly long inputs to the model's maximum length)
    inputs = tokenizer(input_doc, return_tensors='pt', truncation=True)
    with torch.no_grad():
        outputs = model(**inputs)
    # Convert logits to per-class probabilities
    probs = torch.softmax(outputs.logits, dim=1).squeeze().tolist()
    # 7 emotion classes: 공포(fear), 놀람(surprise), 분노(anger), 슬픔(sadness),
    # 중립(neutral), 행복(happiness), 혐오(disgust)
    class_idx = {'공포': 0, '놀람': 1, '분노': 2, '슬픔': 3, '중립': 4, '행복': 5, '혐오': 6}
    results = {class_name: prob for class_name, prob in zip(class_idx, probs)}
    # Find the class with the highest probability
    max_prob_class = max(results, key=results.get)
    max_prob = results[max_prob_class]
    # Display results ("The most strongly expressed emotion: ...")
    return [results, f"가장 강하게 나타난 감정: {max_prob_class}"]
# Optional per-class printout from the original script, kept as a comment:
# for class_name, prob in results.items():
#     print(f"{class_name}: {prob:.2%}")
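# A minimal sketch of calling inference() directly, e.g. from a Python shell in the same
# environment (the sample sentence is hypothetical; the actual probabilities depend on the
# fine-tuned weights):
#
#   probs, summary = inference("오늘 하루가 정말 즐거웠다")
#   print(probs)    # dict mapping each of the 7 emotion labels to its probability
#   print(summary)  # the "가장 강하게 나타난 감정: ..." line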
# Set up the Streamlit interface
st.title('감정 분석(Sentiment Analysis)')
# "Shows, as percentages, how much fear, surprise, anger, sadness, neutrality, happiness, and disgust appear in the text."
st.markdown('<small style="color:grey;">글에 나타난 공포, 놀람, 분노, 슬픔, 중립, 행복, 혐오의 정도를 비율로 알려드립니다.</small>', unsafe_allow_html=True)
# "Enter text here (100 characters or fewer recommended):"
user_input = st.text_area("이 곳에 글 입력(100자 이하 권장):")
if st.button('시작'):  # "Start"
    result = inference(user_input)
    st.write(result[0])  # per-class probabilities
    st.write(result[1])  # the strongest emotion
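# To try the app outside the Space (assuming the script is saved as app.py, the
# sentiment7_model_acc8878.pth checkpoint sits next to it, and streamlit, torch, and
# transformers are installed):
#
#   streamlit run app.py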