import gradio as gr
import matplotlib.pyplot as plt
import numpy as np
from sklearn.datasets import load_diabetes
from sklearn.linear_model import LassoLarsIC
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler



def load_dataset():
    """Load the diabetes dataset (442 samples, 10 features) as pandas objects."""
    X, y = load_diabetes(return_X_y=True, as_frame=True)
    return X, y


def aic_pipeline(X, y):
    """Fit a pipeline that standardizes the features and selects alpha via AIC."""
    lasso_lars_ic = make_pipeline(StandardScaler(), LassoLarsIC(criterion="aic")).fit(X, y)
    return lasso_lars_ic
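
# Note (added): standardizing before the Lasso fit matters because the penalty is
# applied uniformly to all coefficients, so features should be on comparable scales.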


def zou_et_al_criterion_rescaling(criterion, n_samples, noise_variance):
    """Rescale the information criterion to follow the definition of Zou et al."""
    return criterion - n_samples * np.log(2 * np.pi * noise_variance) - n_samples
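
# Note (added): both subtracted terms, n * log(2 * pi * sigma^2) and n, are constants
# along the regularization path (scikit-learn estimates a single noise variance), so
# this rescaling shifts every criterion value by the same amount. It makes the numbers
# match the convention of Zou et al. (2007) without changing which alpha minimizes
# the criterion.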


def zou_et_al_aic(lasso_lars_ic, n_samples):
    """Return the index of the AIC-selected alpha and the rescaled AIC path."""
    aic_criterion = zou_et_al_criterion_rescaling(
        lasso_lars_ic[-1].criterion_,
        n_samples,
        lasso_lars_ic[-1].noise_variance_,
    )
    # Position of the selected alpha along the regularization path.
    index_alpha_path_aic = np.flatnonzero(
        lasso_lars_ic[-1].alphas_ == lasso_lars_ic[-1].alpha_
    )[0]
    return index_alpha_path_aic, aic_criterion

def zou_et_al_bic(lasso_lars_ic, X, y, n_samples):
    """Refit with the BIC criterion; return the selected index and the rescaled BIC path."""
    lasso_lars_ic.set_params(lassolarsic__criterion="bic").fit(X, y)
    bic_criterion = zou_et_al_criterion_rescaling(
        lasso_lars_ic[-1].criterion_,
        n_samples,
        lasso_lars_ic[-1].noise_variance_,
    )
    index_alpha_path_bic = np.flatnonzero(
        lasso_lars_ic[-1].alphas_ == lasso_lars_ic[-1].alpha_
    )[0]
    return index_alpha_path_bic, bic_criterion

def check_same_alpha(index_alpha_path_aic, index_alpha_path_bic):
    """Sanity check: both criteria should select the same alpha on this dataset."""
    assert index_alpha_path_aic == index_alpha_path_bic



def plot_criteria(aic_criterion, bic_criterion, index_alpha_path_bic):
    """Plot both information criteria along the Lasso path and mark the selected alpha."""
    # Create a fresh figure per call instead of reusing figure number 1 across clicks.
    fig = plt.figure(facecolor="w", figsize=(5, 5))
    plt.plot(aic_criterion, color="tab:blue", marker="o", label="AIC criterion")
    plt.plot(bic_criterion, color="tab:orange", marker="o", label="BIC criterion")
    plt.vlines(
        index_alpha_path_bic,
        aic_criterion.min(),
        aic_criterion.max(),
        color="black",
        linestyle="--",
        label="Selected alpha",
    )
    plt.legend()
    plt.ylabel("Information criterion")
    plt.xlabel("Lasso model sequence")
    plt.title("Lasso model selection via AIC and BIC")
    return fig

title = "Lasso model selection via information criteria"

with gr.Blocks(title=title) as demo:
    gr.Markdown(f"# {title}")
    gr.Markdown(
        """
        This demo illustrates probabilistic model selection with information criteria.
        The approach is useful in statistics because it does not require a held-out
        test set, unlike cross-validation.

        AIC and BIC are two ways of scoring a model based on its log-likelihood and
        its complexity.

        It is important to note that the optimization to find alpha with LassoLarsIC
        relies on the AIC or BIC criterion computed in-sample, i.e. directly on the
        training set. This approach differs from the cross-validation procedure.

        One drawback of this kind of model selection is that the same general statistic
        cannot be reused across model families: a suitable criterion must be derived
        for each family separately. Model uncertainty is also not taken into account.
        """
    )
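
    # Added for reference (standard definitions, not from the original example):
    # shows how both criteria trade off fit against complexity.
    gr.Markdown(
        """
        As a reminder: for a model with maximized likelihood `L`, `d` effective
        parameters, and `n` samples, `AIC = -2 log(L) + 2 d` and
        `BIC = -2 log(L) + d log(n)`, so BIC penalizes model complexity more
        strongly than AIC once `n >= 8`.
        """
    )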
    

    
    gr.Markdown("See the original example [here](https://scikit-learn.org/stable/auto_examples/linear_model/plot_lasso_lars_ic.html#sphx-glr-auto-examples-linear-model-plot-lasso-lars-ic-py).")
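    # For comparison (added sketch, not executed in this app): selecting alpha with
    # cross-validation instead of an information criterion could look like
    #
    #     from sklearn.linear_model import LassoCV
    #     lasso_cv = make_pipeline(StandardScaler(), LassoCV(cv=20)).fit(X, y)
    #     alpha_cv = lasso_cv[-1].alpha_
    #
    # LassoCV scores candidate alphas on held-out folds, whereas LassoLarsIC scores
    # them in-sample, which is cheaper but relies on the criterion's assumptions.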
    
    # Process: load the data, fit the pipeline, and compute both rescaled criteria.
    X, y = load_dataset()
    lasso_lars_ic = aic_pipeline(X, y)
    n_samples = X.shape[0]
    index_alpha_path_aic, aic_criterion = zou_et_al_aic(lasso_lars_ic, n_samples)
    index_alpha_path_bic, bic_criterion = zou_et_al_bic(lasso_lars_ic, X, y, n_samples)

    # Sanity check: both criteria select the same alpha on this dataset.
    check_same_alpha(index_alpha_path_aic, index_alpha_path_bic)

    with gr.Tab("AIC BIC Criteria"):
        btn = gr.Button(value="Plot AIC/BIC criteria with regularization")
        btn.click(
            lambda: plot_criteria(aic_criterion, bic_criterion, index_alpha_path_bic),
            outputs=gr.Plot(label="AIC BIC Criteria"),
        )


demo.launch()