huabdul committed
Commit b8c493d
1 parent: dfa0f6d

Update app.py

Files changed (1)
  1. app.py +42 -43
app.py CHANGED
@@ -42,37 +42,35 @@ def create_plot(x1, y1, x2, y2, cov1, cov2, n1, n2, max_depth, n_estimators):
 
     clf.fit(X, y)
 
-    fig = plt.figure(figsize=(12, 5))
-    ax = fig.add_subplot(121)
+    fig = plt.figure(figsize=(4.5, 6.9))
+    ax = fig.add_subplot(211)
 
     xx, yy, Z = get_decision_surface(X, y, clf)
-    ax.contourf(xx, yy, Z, cmap=CMAP, alpha=0.65)
+    ax.contourf(xx, yy, Z, cmap=CMAP, alpha=0.4)
 
     X1, y1 = X[y==0], y[y==0]
     X2, y2 = X[y==1], y[y==1]
 
-    ax.scatter(X1[:, 0], X1[:, 1], c=C1, edgecolor='k', s=40, label='Class A')
-    ax.scatter(X2[:, 0], X2[:, 1], c=C2, edgecolor='k', s=40, label='Class B')
+    ax.scatter(X1[:, 0], X1[:, 1], c=C1, edgecolor='k', s=20, label='Class A')
+    ax.scatter(X2[:, 0], X2[:, 1], c=C2, edgecolor='k', s=20, label='Class B')
 
-    ax.set_xlabel('x'); ax.set_ylabel('y')
     ax.legend()
     ax.set_title(f'AdaBoostClassifier Decision Surface')
 
     scores = clf.decision_function(X)
 
-    ax = fig.add_subplot(122)
+    ax = fig.add_subplot(212)
     ax.hist(scores[y==0], bins=100, range=(scores.min(), scores.max()), facecolor=C1, label="Class A", alpha=0.5, edgecolor="k")
     ax.hist(scores[y==1], bins=100, range=(scores.min(), scores.max()), facecolor=C2, label="Class B", alpha=0.5, edgecolor="k")
 
     ax.set_xlabel('Score'); ax.set_ylabel('Frequency')
     ax.legend()
     ax.set_title('Decision Scores')
+    fig.set_tight_layout(True)
 
     return fig
 
 info = '''
-# AdaBoost Classifier Example on Gaussian Quantile Generated Data.
-
 This example fits an [AdaBoost classifier](https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.AdaBoostClassifier.html#sklearn.ensemble.AdaBoostClassifier) on two non-linearly separable classes. The samples are generated using two [Gaussian quantiles](https://scikit-learn.org/stable/modules/generated/sklearn.datasets.make_gaussian_quantiles.html#sklearn.datasets.make_gaussian_quantiles) of configurable mean and covariance (see the sliders below).
 
 For the first generated Gaussian, the inner half quantile is assigned to Class A and the outer half quantile is assigned to class B. For the second generated quantile, the opposite assignment happens (inner = Class B, outer = Class A).
@@ -84,43 +82,44 @@ Use the controls below to change the Gaussian distribution parameters, number of
 Created by [@huabdul](https://huggingface.co/huabdul) based on [Scikit-learn docs](https://scikit-learn.org/stable/auto_examples/ensemble/plot_adaboost_twoclass.html).
 '''
 with gr.Blocks(analytics_enabled=False) as demo:
-    gr.Markdown(info)
-
-    with gr.Row():
-        with gr.Column():
-            s_x1 = gr.Slider(-10, 10, value=0, step=0.1, label='Mean x1')
-        with gr.Column():
-            s_y1 = gr.Slider(-10, 10, value=0, step=0.1, label='Mean y1')
-    with gr.Row():
-        with gr.Column():
-            s_x2 = gr.Slider(-10, 10, value=2, step=0.1, label='Mean x2')
-        with gr.Column():
-            s_y2 = gr.Slider(-10, 10, value=2, step=0.1, label='Mean y2')
-
-    with gr.Row():
-        with gr.Column():
-            s_cov1 = gr.Slider(0.01, 5, value=1, step=0.01, label='Covariance 1')
-        with gr.Column():
-            s_cov2 = gr.Slider(0.01, 5, value=2, step=0.01, label='Covariance 2')
-
     with gr.Row():
-        with gr.Column():
-            s_n_samples1 = gr.Slider(1, 1000, value=200, step=1, label='n_samples 1')
-        with gr.Column():
-            s_n_samples2 = gr.Slider(1, 1000, value=300, step=1, label='n_samples 2')
-
-    with gr.Row():
-        with gr.Column():
-            s_max_depth = gr.Slider(1, 50, value=1, step=1, label='AdaBoostClassifier max_depth')
-        with gr.Column():
-            s_n_estimators = gr.Slider(1, 500, value=300, step=1, label='AdaBoostClassifier n_estimators')
-
-    btn = gr.Button('Submit')
-
-    plot = gr.Plot(label='Decision Surfaces & Histogram of Scores')
+        with gr.Column(scale=2):
+            gr.Markdown(info)
+            with gr.Row():
+                with gr.Column(min_width=100):
+                    s_x1 = gr.Slider(-10, 10, value=0, step=0.1, label='Mean x1')
+                with gr.Column(min_width=100):
+                    s_y1 = gr.Slider(-10, 10, value=0, step=0.1, label='Mean y1')
+            with gr.Row():
+                with gr.Column(min_width=100):
+                    s_x2 = gr.Slider(-10, 10, value=2, step=0.1, label='Mean x2')
+                with gr.Column(min_width=100):
+                    s_y2 = gr.Slider(-10, 10, value=2, step=0.1, label='Mean y2')
+
+            with gr.Row():
+                with gr.Column(min_width=100):
+                    s_cov1 = gr.Slider(0.01, 5, value=1, step=0.01, label='Covariance 1')
+                with gr.Column(min_width=100):
+                    s_cov2 = gr.Slider(0.01, 5, value=2, step=0.01, label='Covariance 2')
+
+            with gr.Row():
+                with gr.Column(min_width=100):
+                    s_n_samples1 = gr.Slider(1, 1000, value=200, step=1, label='n_samples 1')
+                with gr.Column(min_width=100):
+                    s_n_samples2 = gr.Slider(1, 1000, value=300, step=1, label='n_samples 2')
+
+            with gr.Row():
+                with gr.Column(min_width=100):
+                    s_max_depth = gr.Slider(1, 50, value=1, step=1, label='AdaBoostClassifier max_depth')
+                with gr.Column(min_width=100):
+                    s_n_estimators = gr.Slider(1, 500, value=300, step=1, label='AdaBoostClassifier n_estimators')
+
+            btn = gr.Button('Submit')
+        with gr.Column(scale=1.5):
+            plot = gr.Plot(show_label=False)
 
     btn.click(create_plot, inputs=[s_x1, s_y1, s_x2, s_y2, s_cov1, s_cov2, s_n_samples1, s_n_samples2, s_max_depth, s_n_estimators], outputs=[plot])
     demo.load(create_plot, inputs=[s_x1, s_y1, s_x2, s_y2, s_cov1, s_cov2, s_n_samples1, s_n_samples2, s_max_depth, s_n_estimators], outputs=[plot])
 
 demo.launch()
-#=======================================================
+#=======================================================
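The info text in the diff describes building the two classes from two Gaussian quantiles, with the inner/outer assignment swapped between the first and second blob. The dataset construction itself is not part of this hunk; the following is a minimal sketch of that step, assuming scikit-learn's make_gaussian_quantiles as in the linked upstream example (the helper name and random_state are illustrative, not the committed code):

# Sketch only: how the two-class Gaussian-quantile data described in the info
# text could be built. Parameter names mirror the sliders; not the committed code.
import numpy as np
from sklearn.datasets import make_gaussian_quantiles

def make_data(x1, y1, x2, y2, cov1, cov2, n1, n2, random_state=0):
    # First Gaussian: inner quantile -> class 0 ("Class A"), outer -> class 1 ("Class B").
    Xa, ya = make_gaussian_quantiles(mean=(x1, y1), cov=cov1, n_samples=n1,
                                     n_features=2, n_classes=2,
                                     random_state=random_state)
    # Second Gaussian: flip the assignment (inner -> Class B, outer -> Class A).
    Xb, yb = make_gaussian_quantiles(mean=(x2, y2), cov=cov2, n_samples=n2,
                                     n_features=2, n_classes=2,
                                     random_state=random_state)
    X = np.concatenate([Xa, Xb])
    y = np.concatenate([ya, 1 - yb])  # label flip for the second blob
    return X, y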
 
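create_plot also calls a get_decision_surface helper that does not appear in this diff. A plausible sketch, assuming the usual meshgrid-plus-predict approach (grid_size and margin are made-up parameters, not the committed ones):

# Sketch only: one common way to compute the surface that ax.contourf draws.
# The real helper in app.py may differ.
import numpy as np

def get_decision_surface(X, y, clf, grid_size=300, margin=1.0):
    x_min, x_max = X[:, 0].min() - margin, X[:, 0].max() + margin
    y_min, y_max = X[:, 1].min() - margin, X[:, 1].max() + margin
    xx, yy = np.meshgrid(np.linspace(x_min, x_max, grid_size),
                         np.linspace(y_min, y_max, grid_size))
    # Predict the class for every grid point and reshape back to the grid.
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    return xx, yy, Z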
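The sliders feed max_depth and n_estimators into create_plot, but the hunk only shows clf.fit and clf.decision_function; the construction of clf sits outside the diff. A sketch consistent with the linked scikit-learn example, assuming an AdaBoost ensemble of depth-limited decision trees (the factory function is an assumption, not the committed code):

# Sketch only: AdaBoost over shallow trees, matching the max_depth / n_estimators
# sliders. The committed app.py may configure the classifier differently.
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier

def make_classifier(max_depth=1, n_estimators=300):
    return AdaBoostClassifier(
        DecisionTreeClassifier(max_depth=max_depth),  # weak learner
        n_estimators=n_estimators,
    )

# Usage sketch:
# clf = make_classifier(max_depth, n_estimators); clf.fit(X, y)
# scores = clf.decision_function(X)  # signed scores plotted in the histogram panel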