NTaylor committed on
Commit
3ed123a
1 Parent(s): e1527f1
Files changed (1) hide show
  1. app.py +0 -72
app.py CHANGED
@@ -5,26 +5,10 @@ from sklearn.datasets import make_regression
5
  import pandas as pd
6
  from sklearn.linear_model import ARDRegression, LinearRegression, BayesianRidge
7
  import matplotlib.pyplot as plt
8
- import seaborn as sns
9
  from matplotlib.colors import SymLogNorm
10
  import gradio as gr
11
 
12
 
13
- # def make_regression_data(n_samples=100,
14
- # n_features=100,
15
- # n_informative=10,
16
- # noise=8,
17
- # coef=True,
18
- # random_state=42,):
19
- # X, y, true_weights = make_regression(
20
- # n_samples=n_samples,
21
- # n_features=n_features,
22
- # n_informative=n_informative,
23
- # noise=noise,
24
- # coef=coef,
25
- # random_state=random_state,
26
- # )
27
- # return X, y, true_weights
28
 
29
  X, y, true_weights = make_regression(
30
  n_samples=100,
@@ -41,17 +25,6 @@ X, y, true_weights = make_regression(
41
  # We now fit both Bayesian models and the OLS to later compare the models'
42
  # coefficients.
43
 
44
- # olr = LinearRegression().fit(X, y)
45
- # brr = BayesianRidge(compute_score=True, n_iter=30).fit(X, y)
46
- # ard = ARDRegression(compute_score=True, n_iter=30).fit(X, y)
47
- # df = pd.DataFrame(
48
- # {
49
- # "Weights of true generative process": true_weights,
50
- # "ARDRegression": ard.coef_,
51
- # "BayesianRidge": brr.coef_,
52
- # "LinearRegression": olr.coef_,
53
- # }
54
- # )
55
 
56
  def fit_regression_models(n_iter=30, X=X, y=y, true_weights=true_weights):
57
  olr = LinearRegression().fit(X, y)
@@ -77,19 +50,6 @@ def fit_regression_models(n_iter=30, X=X, y=y, true_weights=true_weights):
77
  # Now we compare the coefficients of each model with the weights of
78
  # the true generative model.
79
 
80
-
81
- # plt.figure(figsize=(10, 6))
82
- # ax = sns.heatmap(
83
- # df.T,
84
- # norm=SymLogNorm(linthresh=10e-4, vmin=-80, vmax=80),
85
- # cbar_kws={"label": "coefficients' values"},
86
- # cmap="seismic_r",
87
- # )
88
- # plt.ylabel("linear model")
89
- # plt.xlabel("coefficients")
90
- # plt.tight_layout(rect=(0, 0, 1, 0.95))
91
- # _ = plt.title("Models' coefficients")
92
-
93
  def visualize_coefficients(df=None):
94
  fig = plt.figure(figsize=(10, 6))
95
  ax = sns.heatmap(
@@ -119,16 +79,6 @@ def visualize_coefficients(df=None):
119
  # --------------------------------
120
 
121
 
122
- # ard_scores = -np.array(ard.scores_)
123
- # brr_scores = -np.array(brr.scores_)
124
- # plt.plot(ard_scores, color="navy", label="ARD")
125
- # plt.plot(brr_scores, color="red", label="BayesianRidge")
126
- # plt.ylabel("Log-likelihood")
127
- # plt.xlabel("Iterations")
128
- # plt.xlim(1, 30)
129
- # plt.legend()
130
- # _ = plt.title("Models log-likelihood")
131
-
132
  def plot_marginal_log_likelihood(ard=None, brr=None, n_iter=30):
133
 
134
  fig = plt.figure(figsize=(10, 6))
@@ -240,28 +190,6 @@ def generate_polynomial_dataset(degree = 10):
240
  # Plotting polynomial regressions with std errors of the scores
241
  # -------------------------------------------------------------
242
 
243
- # ax = sns.scatterplot(
244
- # data=full_data, x="input_feature", y="target", color="black", alpha=0.75
245
- # )
246
- # ax.plot(X_plot, y_plot, color="black", label="Ground Truth")
247
- # ax.plot(X_plot, y_brr, color="red", label="BayesianRidge with polynomial features")
248
- # ax.plot(X_plot, y_ard, color="navy", label="ARD with polynomial features")
249
- # ax.fill_between(
250
- # X_plot.ravel(),
251
- # y_ard - y_ard_std,
252
- # y_ard + y_ard_std,
253
- # color="navy",
254
- # alpha=0.3,
255
- # )
256
- # ax.fill_between(
257
- # X_plot.ravel(),
258
- # y_brr - y_brr_std,
259
- # y_brr + y_brr_std,
260
- # color="red",
261
- # alpha=0.3,
262
- # )
263
- # ax.legend()
264
- # _ = ax.set_title("Polynomial fit of a non-linear feature")
265
 
266
 
267
  def visualize_bayes_regressions_polynomial_features(degree = 10):
 
5
  import pandas as pd
6
  from sklearn.linear_model import ARDRegression, LinearRegression, BayesianRidge
7
  import matplotlib.pyplot as plt
 
8
  from matplotlib.colors import SymLogNorm
9
  import gradio as gr
10
 
11
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
12
 
13
  X, y, true_weights = make_regression(
14
  n_samples=100,
 
25
  # We now fit both Bayesian models and the OLS to later compare the models'
26
  # coefficients.
27
 
 
 
 
 
 
 
 
 
 
 
 
28
 
29
  def fit_regression_models(n_iter=30, X=X, y=y, true_weights=true_weights):
30
  olr = LinearRegression().fit(X, y)
 
50
  # Now we compare the coefficients of each model with the weights of
51
  # the true generative model.
52
 
 
 
 
 
 
 
 
 
 
 
 
 
 
53
  def visualize_coefficients(df=None):
54
  fig = plt.figure(figsize=(10, 6))
55
  ax = sns.heatmap(
 
79
  # --------------------------------
80
 
81
 
 
 
 
 
 
 
 
 
 
 
82
  def plot_marginal_log_likelihood(ard=None, brr=None, n_iter=30):
83
 
84
  fig = plt.figure(figsize=(10, 6))
 
190
  # Plotting polynomial regressions with std errors of the scores
191
  # -------------------------------------------------------------
192
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
193
 
194
 
195
  def visualize_bayes_regressions_polynomial_features(degree = 10):