ferdmartin committed
Commit e59445e · 1 Parent(s): a77cc5d

Update app.py

Files changed (1)
  1. app.py +26 -29
app.py CHANGED

@@ -18,7 +18,6 @@ def main():
     import eli5
     import shap
     from custom_models import HF_DistilBertBasedModelAppDocs, HF_BertBasedModelAppDocs
-    import docx
 
     # Initialize Spacy
     nlp = spacy.load("en_core_web_sm")
@@ -93,24 +92,8 @@ def main():
         else:
             model = HF_DistilBertBasedModelAppDocs.from_pretrained("ferdmartin/HF_DistilBertBasedModelAppDocs").to(device)
         return model
-
-    def app_model(option, model, text):
-        with st.spinner("Wait for the magic 🪄🔮"):
-            # Use model
-            if option in ("Naive Bayes", "Logistic Regression"):
-                prediction, predict_proba = nb_lr(model, text)
-                st.session_state["sklearn"] = True
-            else:
-                prediction, predict_proba = torch_pred(tokenizer, model, text)
-                st.session_state["torch"] = True
 
-            # Store the result in session state
-            st.session_state["color_pred"] = "blue" if prediction == 0 else "red"
-            prediction = pred_str(prediction)
-            st.session_state["prediction"] = prediction
-            st.session_state["predict_proba"] = predict_proba
-            st.session_state["text"] = text
-
+
     # Streamlit app:
 
     models_available = {"Logistic Regression":"models/baseline_model_lr2.joblib",
@@ -121,7 +104,7 @@ def main():
 
     st.set_page_config(page_title="AI/Human GradAppDocs", page_icon="🤖", layout="wide")
     st.title("Academic Application Document Classifier")
-    st.header("Is it human-made 📝 or Generated with AI 🦾 ? ")
+    st.header("Is it human-made 📝 or Generated with AI 🤖 ? ")
 
     # Check the model to use
     def restore_prediction_state():
@@ -148,18 +131,32 @@ def main():
     </style>
     """
     st.markdown(hide_st_style, unsafe_allow_html=True)
-    col1, col2 = st.columns(2)
+
     # Use model
-    with col1:
-        if st.button("Let's check this text!"):
-            if text.strip() == "":
-                st.error("Please enter some text")
-            else:
-                app_model(option, model, text)
+    if st.button("Let's check this text!"):
+        if text.strip() == "":
+            st.error("Please enter some text")
+        else:
+            with st.spinner("Wait for the magic 🪄🔮"):
+                # Use model
+                if option in ("Naive Bayes", "Logistic Regression"):
+                    prediction, predict_proba = nb_lr(model, text)
+                    st.session_state["sklearn"] = True
+                else:
+                    prediction, predict_proba = torch_pred(tokenizer, model, text)
+                    st.session_state["torch"] = True
+
+                # Store the result in session state
+                st.session_state["color_pred"] = "blue" if prediction == 0 else "red"
+                prediction = pred_str(prediction)
+                st.session_state["prediction"] = prediction
+                st.session_state["predict_proba"] = predict_proba
+                st.session_state["text"] = text
+
     # Print result
-    #st.markdown(f"I think this text is: **:{st.session_state['color_pred']}[{st.session_state['prediction']}]** (Prediction probability: {st.session_state['predict_proba'] * 100}%)")
-
-    if "prediction" in st.session_state:
+                st.markdown(f"I think this text is: **:{st.session_state['color_pred']}[{st.session_state['prediction']}]** (Prediction probability: {st.session_state['predict_proba'] * 100}%)")
+
+    elif "prediction" in st.session_state:
         # Display the stored result if available
         st.markdown(f"I think this text is: **:{st.session_state['color_pred']}[{st.session_state['prediction']}]** (Prediction probability: {st.session_state['predict_proba'] * 100}%)")
 
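
Note: the inlined prediction block above calls three helpers, nb_lr, torch_pred, and pred_str, that are defined earlier in app.py and do not appear in this diff. Purely as a reading aid, here is a minimal sketch of what such helpers could look like; the signatures are inferred from the call sites, and the label strings, device handling, and model output format are assumptions rather than code from this repository.

import torch

def nb_lr(model, text):
    # Assumed scikit-learn pipeline (Naive Bayes / Logistic Regression):
    # return the predicted class and its probability for a single document.
    prediction = model.predict([text])[0]
    predict_proba = round(model.predict_proba([text]).max(), 4)
    return prediction, predict_proba

def torch_pred(tokenizer, model, text):
    # Assumed Hugging Face tokenizer + fine-tuned (Distil)BERT classifier.
    device = "cuda" if torch.cuda.is_available() else "cpu"  # the app defines `device` elsewhere
    inputs = tokenizer(text, truncation=True, padding=True, return_tensors="pt").to(device)
    with torch.no_grad():
        logits = model(**inputs).logits  # assumes an HF-style output object with a .logits field
        probs = torch.softmax(logits, dim=-1)
    prediction = int(probs.argmax(dim=-1))
    predict_proba = round(float(probs.max()), 4)
    return prediction, predict_proba

def pred_str(prediction):
    # Hypothetical label mapping: 0 is treated as human-written (coloured blue above),
    # 1 as AI-generated (coloured red).
    return "Human-made" if prediction == 0 else "Generated with AI"

All the inlined block depends on is that each helper returns a (prediction, probability) pair, which it then stores in st.session_state for redisplay on the next rerun.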