kkhushisaid committed
Commit 63c4b45
1 Parent(s): 12923d9

Update app.py

Files changed (1)
  1. app.py +33 -37
app.py CHANGED
@@ -1,33 +1,36 @@
 import gradio as gr
 import pickle
 import pandas as pd
+from sklearn.preprocessing import StandardScaler
+from sklearn.model_selection import train_test_split
+from sklearn import __version__ as sklearn_version
 
-# load the data
-heart=pd.read_csv('heart.dat', header=None, sep=' ', names=['age', 'sex', 'cp', 'trestbps', 'chol',
+# Check scikit-learn version
+print(f"scikit-learn version: {sklearn_version}")
+
+# Load the data
+heart = pd.read_csv('heart.dat', header=None, sep=' ', names=['age', 'sex', 'cp', 'trestbps', 'chol',
                                                              'fbs', 'restecg', 'thalach', 'exang',
                                                              'oldpeak', 'slope', 'ca', 'thal', 'heart disease'])
 
-# load the saved models
-with open('Tree.pkl', 'rb') as f:
-    tree_model = pickle.load(f)
-
-with open('svm.pkl', 'rb') as f:
-    svm_model = pickle.load(f)
-
-with open('QDA.pkl', 'rb') as f:
-    qda_model = pickle.load(f)
-
-with open('MLP.pkl', 'rb') as f:
-    mlp_model = pickle.load(f)
+# Load the saved models with error handling
+def load_model(filename):
+    try:
+        with open(filename, 'rb') as f:
+            return pickle.load(f)
+    except Exception as e:
+        print(f"Error loading {filename}: {e}")
+        return None
 
-with open('Log.pkl', 'rb') as f:
-    log_model = pickle.load(f)
-
-with open('LDA.pkl', 'rb') as f:
-    lda_model = pickle.load(f)
-
-with open('For.pkl', 'rb') as f:
-    for_model = pickle.load(f)
+models = {
+    'Tree': load_model('Tree.pkl'),
+    'SVM': load_model('svm.pkl'),
+    'QDA': load_model('QDA.pkl'),
+    'MLP': load_model('MLP.pkl'),
+    'Log': load_model('Log.pkl'),
+    'LDA': load_model('LDA.pkl'),
+    'For': load_model('For.pkl')
+}
 
 # Define the function to make predictions
 def make_prediction(age, sex, cp, trestbps, chol, fbs, restecg, thalach, exang, oldpeak, slope, ca, thal, model_name):
@@ -48,27 +51,20 @@ def make_prediction(age, sex, cp, trestbps, chol, fbs, restecg, thalach, exang,
         'thal': [thal]
     })
 
-    # feature scaling
-    from sklearn.model_selection import train_test_split
+    # Feature scaling
     X = heart.drop('heart disease', axis=1)
     y = heart['heart disease']
     X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=42, stratify=y)
-    from sklearn.preprocessing import StandardScaler
     scaler = StandardScaler()
     X_train_std = scaler.fit_transform(X_train)
 
-    # choose the model and make prediction
-    model_dict = {'Decision_Tree': tree_model,
-                  'QDA': qda_model,
-                  'Artificial_Neural_Networks': mlp_model,
-                  'Logistic_Regression': log_model,
-                  'LDA': lda_model,
-                  'Random_Forest': for_model,
-                  'SVM': svm_model}
-    model = model_dict[model_name]
+    # Choose the model and make prediction
+    model = models.get(model_name)
+    if model is None:
+        return "Model not found or failed to load."
+
     input_data_std = scaler.transform(input_data)
     probas = model.predict_proba(input_data_std)
-    outtext={1:'no heart_disease', 2:'heart disease'}
     return {f"Probability of Class {i+1}": proba for i, proba in enumerate(probas[0])}
 
 # Create the Gradio interface
@@ -86,9 +82,9 @@ inputs = [
     gr.inputs.Dropdown(choices=[1,2,3], label='slope ST'),
     gr.inputs.Dropdown(choices=[0,1,2,3], label='major vessels'),
     gr.inputs.Dropdown(choices=[3,6,7], label='thal'),
-    gr.inputs.Dropdown(choices=['Decision_Tree', 'QDA', 'Artificial_Neural_Networks', 'Logistic_Regression', 'LDA', 'Random_Forest', 'SVM'], label='Select the model')
+    gr.inputs.Dropdown(choices=['Tree', 'QDA', 'MLP', 'Log', 'LDA', 'For', 'SVM'], label='Select the model')
 ]
 
 outputs = gr.outputs.Label(label='Predicted class probabilities')
 
-gr.Interface(fn=make_prediction, inputs=inputs, outputs=outputs).launch()
+gr.Interface(fn=make_prediction, inputs=inputs, outputs=outputs).launch()
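
The app assumes the seven pickles (Tree.pkl, svm.pkl, QDA.pkl, MLP.pkl, Log.pkl, LDA.pkl, For.pkl) already sit next to app.py and were written with a compatible scikit-learn release, which is presumably what the new version print is meant to help check. As a minimal sketch only, assuming a hypothetical companion script (train_models.py does not exist in this commit) and assuming Tree.pkl, Log.pkl and For.pkl hold a decision tree, a logistic regression and a random forest respectively, such files could be produced with the same columns, split parameters, and scaling that make_prediction re-creates:

# train_models.py -- hypothetical training script, not part of this commit.
# It mirrors the preprocessing in app.py (same columns, same split, same scaler)
# and pickles a few illustrative estimators under the filenames the app loads.
import pickle
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier

heart = pd.read_csv('heart.dat', header=None, sep=' ',
                    names=['age', 'sex', 'cp', 'trestbps', 'chol',
                           'fbs', 'restecg', 'thalach', 'exang',
                           'oldpeak', 'slope', 'ca', 'thal', 'heart disease'])

X = heart.drop('heart disease', axis=1)
y = heart['heart disease']
# Same split parameters as make_prediction, so the scaler the app re-fits at
# request time sees the same training rows the models were fitted on.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=42, stratify=y)

scaler = StandardScaler()
X_train_std = scaler.fit_transform(X_train)

# Only three of the seven estimators are shown; the others follow the same pattern.
estimators = {
    'Tree.pkl': DecisionTreeClassifier(random_state=42),
    'Log.pkl': LogisticRegression(max_iter=1000),
    'For.pkl': RandomForestClassifier(random_state=42),
}
for filename, estimator in estimators.items():
    estimator.fit(X_train_std, y_train)
    with open(filename, 'wb') as f:
        pickle.dump(estimator, f)

Pickles written this way only stay loadable while the serving environment runs the same, or at least a pickle-compatible, scikit-learn version, hence the value of printing the installed version at startup.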
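
The interface definition still goes through the gr.inputs / gr.outputs namespaces, which recent Gradio releases have dropped in favour of top-level components. Purely as a hedged sketch against that newer component API (stub_predict below is a placeholder for make_prediction, and only the dropdowns visible in this diff are reproduced):

# Hypothetical port of the interface to the top-level Gradio component API;
# app.py itself keeps gr.inputs.Dropdown / gr.outputs.Label.
import gradio as gr

def stub_predict(slope, ca, thal, model_name):
    # Stand-in for make_prediction; returns fixed class probabilities.
    return {"Probability of Class 1": 0.5, "Probability of Class 2": 0.5}

inputs = [
    gr.Dropdown(choices=[1, 2, 3], label='slope ST'),
    gr.Dropdown(choices=[0, 1, 2, 3], label='major vessels'),
    gr.Dropdown(choices=[3, 6, 7], label='thal'),
    gr.Dropdown(choices=['Tree', 'QDA', 'MLP', 'Log', 'LDA', 'For', 'SVM'],
                label='Select the model'),
]
outputs = gr.Label(label='Predicted class probabilities')

gr.Interface(fn=stub_predict, inputs=inputs, outputs=outputs).launch()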