pushpikaLiyanagama
committed on
Commit
•
37ef436
1
Parent(s):
20065a0
Upload 7 files
Browse files- app.py +61 -0
- requirements.txt +5 -0
- scaler.joblib +3 -0
- svm_model_input.joblib +3 -0
- svm_model_perception.joblib +3 -0
- svm_model_processing.joblib +3 -0
- svm_model_understanding.joblib +3 -0
app.py
ADDED
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
import joblib
import numpy as np


# Streamlit re-executes this script on every user interaction; cache the
# deserialized artifacts so the joblib files are read from disk only once
# per process instead of on every rerun.
@st.cache_resource
def _load_artifacts():
    """Load the fitted scaler and the four per-target SVM models.

    Returns:
        tuple: ``(scaler, models)`` where ``models`` maps each target
        name ("processing", "perception", "input", "understanding")
        to its fitted estimator.
    """
    scaler = joblib.load('scaler.joblib')
    models = {
        "processing": joblib.load('svm_model_processing.joblib'),
        "perception": joblib.load('svm_model_perception.joblib'),
        "input": joblib.load('svm_model_input.joblib'),
        "understanding": joblib.load('svm_model_understanding.joblib'),
    }
    return scaler, models


# Keep the original module-level names so the rest of the script
# (predict() and the UI) is unchanged.
scaler, models = _load_artifacts()
|
13 |
+
|
14 |
+
# Define the prediction function
|
15 |
+
def predict(user_input):
    """Run every target-specific SVM on a single feature vector.

    Args:
        user_input: sequence of raw feature values, ordered exactly as
            the models were trained (see ``columns`` in the UI section).

    Returns:
        dict: target name -> predicted label for that target.
    """
    # Shape the raw values into one 2-D row and apply the fitted scaler
    # before handing the row to each estimator.
    features = np.asarray(user_input).reshape(1, -1)
    scaled = scaler.transform(features)

    # One prediction per target model; each model returns a length-1
    # array for the single row, so take element 0.
    return {target: mdl.predict(scaled)[0] for target, mdl in models.items()}
|
29 |
+
|
30 |
+
# ---- Streamlit user interface -------------------------------------------
st.title("ML Prediction Application")
st.header("Input your data for predictions")

# Feature names, in the exact order the models were trained on.
# NOTE(review): 'Abstract Materiale' looks like a typo, but it must match
# the training column name — do not "fix" it here.
columns = [
    'Course Overview', 'Reading File', 'Abstract Materiale',
    'Concrete Material', 'Visual Materials', 'Self-Assessment',
    'Exercises Submit', 'Quiz Submitted', 'Playing', 'Paused',
    'Unstarted', 'Buffering'
]

# One numeric input widget per feature; values are collected in the same
# order as `columns`, which is the order predict() expects.
user_input = [st.number_input(f"{col}", value=0.0) for col in columns]

if st.button("Predict"):
    # Run all models; surface any failure (e.g. missing artifact files)
    # in the UI rather than crashing the app.
    try:
        predictions = predict(user_input)
        st.subheader("Predictions")
        st.json(predictions)
    except Exception as e:
        st.error(f"An error occurred: {e}")

# Usage notes shown at the bottom of the page.
st.markdown("""
- To run the app, execute `streamlit run app.py` in your terminal.
- Make sure the `scaler.joblib` and model files are in the same directory as this script.
""")
|
requirements.txt
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
joblib
|
2 |
+
streamlit
|
3 |
+
numpy>=1.21.0
|
4 |
+
scikit-learn==1.5.2
|
5 |
+
pandas
|
scaler.joblib
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:2d40f613f3b8b7bd9b51dc6b13631dd07ebdf6b373e41e6c5fd9d7cf20af814d
|
3 |
+
size 1431
|
svm_model_input.joblib
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:424a08119df19f4109d23e0d5f17084f0d3520c6a3c7eb2b137290dbf07e8d41
|
3 |
+
size 34539
|
svm_model_perception.joblib
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:39e1f950e7427bde99c6de38bb977fb21217b36b1fa0eec4a4f68b2f58b0a99a
|
3 |
+
size 30139
|
svm_model_processing.joblib
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:7f6e8e1f74fb2d1f78144fd1cc820aa3af538459b07429d2e82abb9a3d2e5a2d
|
3 |
+
size 68651
|
svm_model_understanding.joblib
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:1dac2d4345f1e12fb913694390fa16a77ab781ad209e8433db4dc98d3d132423
|
3 |
+
size 53451
|