Jayabalambika committed
Commit 7d7dc8e · Parent: 902bca8

Delete app.py

Files changed (1)
  1. app.py +0 -156
app.py DELETED
@@ -1,156 +0,0 @@
- import gradio as gr
- import time
- import numpy as np
- import matplotlib.pyplot as plt
-
- from scipy.linalg import toeplitz, cholesky
- from sklearn.covariance import LedoitWolf, OAS
-
- np.random.seed(0)
-
-
-
-
-
-
- def plot_mse():
-     # plot MSE
-     plt.clf()
-     plt.subplot(2, 1, 1)
-     plt.errorbar(
-         slider_samples_range,
-         lw_mse.mean(1),
-         yerr=lw_mse.std(1),
-         label="Ledoit-Wolf",
-         color="navy",
-         lw=2,
-     )
-     plt.errorbar(
-         slider_samples_range,
-         oa_mse.mean(1),
-         yerr=oa_mse.std(1),
-         label="OAS",
-         color="darkorange",
-         lw=2,
-     )
-     plt.ylabel("Squared error")
-     plt.legend(loc="upper right")
-     plt.title("Comparison of covariance estimators")
-     plt.xlim(5, 31)
-     return plt
-
-
- def plot_shrinkage():
-     # plot shrinkage coefficient
-     plt.subplot(2, 1, 2)
-     plt.errorbar(
-         slider_samples_range,
-         lw_shrinkage.mean(1),
-         yerr=lw_shrinkage.std(1),
-         label="Ledoit-Wolf",
-         color="navy",
-         lw=2,
-     )
-     plt.errorbar(
-         slider_samples_range,
-         oa_shrinkage.mean(1),
-         yerr=oa_shrinkage.std(1),
-         label="OAS",
-         color="darkorange",
-         lw=2,
-     )
-     plt.xlabel("n_samples")
-     plt.ylabel("Shrinkage")
-     plt.legend(loc="lower right")
-     plt.ylim(plt.ylim()[0], 1.0 + (plt.ylim()[1] - plt.ylim()[0]) / 10.0)
-     plt.xlim(5, 31)
-
-     # plt.show()
-     return plt
-
-
-
-
-
-
- title = "Ledoit-Wolf vs OAS estimation"
-
- # def greet(name):
- #     return "Hello " + name + "!"
- with gr.Blocks(title=title, theme=gr.themes.Default(font=[gr.themes.GoogleFont("Inconsolata"), "Arial", "sans-serif"])) as demo:
-     gr.Markdown(f"# {title}")
-
-     gr.Markdown(
-         """
- The usual covariance maximum likelihood estimate can be regularized using shrinkage. Ledoit and Wolf proposed a close formula to compute the asymptotically optimal shrinkage parameter (minimizing a MSE criterion), yielding the Ledoit-Wolf covariance estimate.
-
- Chen et al. proposed an improvement of the Ledoit-Wolf shrinkage parameter, the OAS coefficient, whose convergence is significantly better under the assumption that the data are Gaussian.
-
- This example, inspired from Chen’s publication [1], shows a comparison of the estimated MSE of the LW and OAS methods, using Gaussian distributed data.
-
- [1] “Shrinkage Algorithms for MMSE Covariance Estimation” Chen et al., IEEE Trans. on Sign. Proc., Volume 58, Issue 10, October 2010.
- """)
-
-     n_features = 100
-
-     min_slider_samples_range = gr.Slider(6, 31, value=6, step=1, label="min_samples_range", info="Choose between 6 and 31")
-     max_slider_samples_range = gr.Slider(6, 31, value=31, step=1, label="max_samples_range", info="Choose between 6 and 31")
-
-
-
-     r = 0.1
-
-     real_cov = toeplitz(r ** np.arange(n_features))
-     coloring_matrix = cholesky(real_cov)
-     gr.Markdown(" **[Demo is based on sklearn docs](https://scikit-learn.org/stable/auto_examples/covariance/plot_lw_vs_oas.html)**")
-     # name = "hardy"
-     # greet_btn = gr.Button("Greet")
-     # output = gr.Textbox(label="Output Box")
-     # greet_btn.click(fn=greet, inputs=name, outputs=output)
-     gr.Label(value="Comparison of Covariance Estimators")
-     # generate_plots()
-     # print("slider_samples_range:",slider_samples_range)
-     slider_samples_range =np.arange(min_slider_samples_range,max_slider_samples_range,1)
-     n_features = 100
-     repeat = 100
-     lw_mse = np.zeros((slider_samples_range.size, repeat))
-     oa_mse = np.zeros((slider_samples_range.size, repeat))
-     lw_shrinkage = np.zeros((slider_samples_range.size, repeat))
-     oa_shrinkage = np.zeros((slider_samples_range.size, repeat))
-
-
-     for i, n_samples in enumerate(slider_samples_range):
-         for j in range(repeat):
-
-             X = np.dot(np.random.normal(size=(n_samples, n_features)), coloring_matrix.T)
-
-             lw = LedoitWolf(store_precision=False, assume_centered=True)
-             lw.fit(X)
-             lw_mse[i, j] = lw.error_norm(real_cov, scaling=False)
-             lw_shrinkage[i, j] = lw.shrinkage_
-
-             oa = OAS(store_precision=False, assume_centered=True)
-             oa.fit(X)
-             oa_mse[i, j] = oa.error_norm(real_cov, scaling=False)
-             oa_shrinkage[i, j] = oa.shrinkage_
-     #if min_slider_samples_range:
-
-     min_slider_samples_range.change(plot_mse, outputs= gr.Plot() )
-     max_slider_samples_range.change(plot_shrinkage, outputs= gr.Plot() )
-
-
-
-     #elif max_slider_samples_range:
-
-
-
-     # elif changed == False:
-     #     min_slider_samples_range.change(generate_plots, inputs=[min_slider_samples_range,max_slider_samples_range], outputs= gr.Plot() )
-     #     max_slider_samples_range.change(generate_plots, inputs=[min_slider_samples_range,max_slider_samples_range], outputs= gr.Plot() )
-     #     changed = True
-
-     # else:
-     #     pass
-
-
- demo.launch()
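The description embedded in app.py above relies on the idea of shrinkage without writing it out. In the convention used by scikit-learn's LedoitWolf and OAS estimators (a standard form, stated here for reference rather than taken from the deleted file), the shrunk covariance blends the empirical covariance with a scaled identity matrix:

\hat{\Sigma}_{\mathrm{shrunk}} = (1 - \delta)\,\hat{\Sigma} + \delta\,\frac{\mathrm{tr}(\hat{\Sigma})}{p}\, I_p, \qquad 0 \le \delta \le 1,

where \hat{\Sigma} is the empirical covariance of the samples, p is the number of features (100 in this app), and \delta is the shrinkage coefficient tracked by plot_shrinkage. Ledoit-Wolf and OAS differ only in the data-driven formula used to choose \delta.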
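In the file above, slider_samples_range is computed with np.arange over the two gr.Slider components themselves while the gr.Blocks context is still being built, and the .change handlers are registered without inputs, so the simulation never receives the sliders' current values. A minimal sketch of one way the same Ledoit-Wolf vs OAS comparison could be wired interactively is given below; it assumes current Gradio and scikit-learn APIs, and names such as run_comparison and the reduced repeat = 20 are illustrative choices, not part of the original app.

import gradio as gr
import numpy as np
import matplotlib.pyplot as plt
from scipy.linalg import toeplitz, cholesky
from sklearn.covariance import LedoitWolf, OAS

np.random.seed(0)

n_features = 100
repeat = 20  # fewer repetitions than the original 100, to keep the demo responsive
real_cov = toeplitz(0.1 ** np.arange(n_features))
coloring_matrix = cholesky(real_cov)


def run_comparison(min_n, max_n):
    # Sweep n_samples over the slider range and record the MSE of both estimators.
    samples_range = np.arange(int(min_n), int(max_n))
    lw_mse = np.zeros((samples_range.size, repeat))
    oa_mse = np.zeros((samples_range.size, repeat))
    for i, n_samples in enumerate(samples_range):
        for j in range(repeat):
            X = np.random.normal(size=(n_samples, n_features)) @ coloring_matrix.T
            lw = LedoitWolf(store_precision=False, assume_centered=True).fit(X)
            oa = OAS(store_precision=False, assume_centered=True).fit(X)
            lw_mse[i, j] = lw.error_norm(real_cov, scaling=False)
            oa_mse[i, j] = oa.error_norm(real_cov, scaling=False)
    fig, ax = plt.subplots()
    ax.errorbar(samples_range, lw_mse.mean(1), yerr=lw_mse.std(1), label="Ledoit-Wolf", color="navy", lw=2)
    ax.errorbar(samples_range, oa_mse.mean(1), yerr=oa_mse.std(1), label="OAS", color="darkorange", lw=2)
    ax.set_xlabel("n_samples")
    ax.set_ylabel("Squared error")
    ax.set_title("Comparison of covariance estimators")
    ax.legend(loc="upper right")
    return fig  # gr.Plot renders a returned matplotlib Figure


with gr.Blocks(title="Ledoit-Wolf vs OAS estimation") as demo:
    min_slider = gr.Slider(6, 31, value=6, step=1, label="min_samples_range")
    max_slider = gr.Slider(6, 31, value=31, step=1, label="max_samples_range")
    plot = gr.Plot()
    # Passing the sliders as inputs is what lets their current values reach the callback.
    min_slider.change(run_comparison, inputs=[min_slider, max_slider], outputs=plot)
    max_slider.change(run_comparison, inputs=[min_slider, max_slider], outputs=plot)

demo.launch()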