caliex committed on
Commit
297aa6f
·
1 Parent(s): eb90fa9

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +77 -0
app.py ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import itertools
2
+ import gradio as gr
3
+ import numpy as np
4
+ from scipy import linalg
5
+ import matplotlib.pyplot as plt
6
+ import matplotlib as mpl
7
+ from sklearn import mixture
8
+
9
+ color_iter = itertools.cycle(["navy", "c", "cornflowerblue", "gold", "darkorange"])
10
+
11
def plot_results(X, Y_, means, covariances, index, title):
    """Scatter-plot clustered 2-D points with one covariance ellipse per component.

    Parameters
    ----------
    X : ndarray of shape (n_points, 2)
        The 2-D data points.
    Y_ : ndarray of shape (n_points,)
        Predicted component label for each point.
    means : array-like of shape (n_components, 2)
        Component means (ellipse centers).
    covariances : array-like of shape (n_components, 2, 2)
        Full covariance matrix of each component.
    index : int
        Row (0-based) of the 2x1 subplot grid to draw into.
    title : str
        Title of the subplot.
    """
    splot = plt.subplot(2, 1, 1 + index)
    for i, (mean, covar, color) in enumerate(zip(means, covariances, color_iter)):
        # Skip components that claimed no points — nothing to draw, and this
        # also avoids the eigendecomposition work below for empty components.
        if not np.any(Y_ == i):
            continue
        plt.scatter(X[Y_ == i, 0], X[Y_ == i, 1], 0.8, color=color)

        # Eigen-decomposition gives the ellipse axes; scale to ~2 standard
        # deviations so the ellipse covers most of the component's mass.
        v, w = linalg.eigh(covar)
        v = 2.0 * np.sqrt(2.0) * np.sqrt(v)
        u = w[0] / linalg.norm(w[0])
        # arctan2 is safe when u[0] == 0 (vertical major axis), unlike
        # arctan(u[1] / u[0]) which would divide by zero.
        angle = np.degrees(np.arctan2(u[1], u[0]))
        ell = mpl.patches.Ellipse(mean, v[0], v[1], angle=180.0 + angle, color=color)
        ell.set_clip_box(splot.bbox)
        ell.set_alpha(0.5)
        splot.add_artist(ell)

    plt.xlim(-9.0, 5.0)
    plt.ylim(-3.0, 6.0)
    plt.xticks(())
    plt.yticks(())
    plt.title(title)
def generate_plot(num_components_gmm, num_components_dpgmm, n_samples=500):
    """Fit a GMM and a Bayesian GMM to synthetic 2-D data and plot both.

    Parameters
    ----------
    num_components_gmm : int or str
        Number of components for the EM ``GaussianMixture`` (coerced to int,
        since Gradio may deliver slider values as strings/floats).
    num_components_dpgmm : int or str
        Number of components for the ``BayesianGaussianMixture`` (coerced to int).
    n_samples : int, optional
        Points drawn per synthetic cluster. Defaults to 500, matching the
        module-level constant the original implementation depended on.

    Returns
    -------
    str
        Path of the saved PNG figure (``"plot.png"``).
    """
    num_components_gmm = int(num_components_gmm)
    num_components_dpgmm = int(num_components_dpgmm)

    # Fixed seed so identical inputs always produce the same figure.
    np.random.seed(0)
    C = np.array([[0.0, -0.1], [1.7, 0.4]])
    # Two clusters: one sheared/stretched by C, one spherical shifted to (-6, 3).
    X = np.r_[
        np.dot(np.random.randn(n_samples, 2), C),
        0.7 * np.random.randn(n_samples, 2) + np.array([-6, 3]),
    ]

    gmm = mixture.GaussianMixture(
        n_components=num_components_gmm, covariance_type="full"
    ).fit(X)
    dpgmm = mixture.BayesianGaussianMixture(
        n_components=num_components_dpgmm, covariance_type="full"
    ).fit(X)

    # Fresh figure per call so repeated invocations don't stack subplots.
    plt.figure()
    plot_results(X, gmm.predict(X), gmm.means_, gmm.covariances_, 0, "Gaussian Mixture")
    plot_results(
        X,
        dpgmm.predict(X),
        dpgmm.means_,
        dpgmm.covariances_,
        1,
        "Bayesian Gaussian Mixture with a Dirichlet process prior",
    )
    plt.tight_layout()

    # Save the plot as an image file
    image_path = "plot.png"
    plt.savefig(image_path)
    plt.close()  # Close the plot to release memory
    return image_path
# Points drawn per synthetic cluster; kept at module level for backward
# compatibility (generate_plot also carries it as a default parameter).
n_samples = 500

# NOTE(review): the original used gr.inputs.Slider / gr.outputs.Image, which
# are deprecated namespaces removed in modern Gradio — the top-level
# components are used instead. generate_plot returns a file path, so the
# Image output is declared type="filepath" to match (the original "pil"
# declaration did not match the returned value).
iface = gr.Interface(
    generate_plot,
    [
        gr.Slider(1, 10, step=1, label="Number of components (GMM)"),
        gr.Slider(1, 10, step=1, label="Number of components (DPGMM)"),
    ],
    gr.Image(type="filepath"),
    title="Gaussian Mixture Model Ellipsoids",
    description="Gaussian Mixture Model Ellipsoids is an example that demonstrates the use of Expectation Maximization (GaussianMixture class) and Variational Inference (BayesianGaussianMixture class) models to fit a mixture of two Gaussians. The models have access to five components for fitting the data, but the Expectation Maximization model uses all five components while the Variational Inference model adapts the number of components based on the data. The plot shows that the Expectation Maximization model may split components arbitrarily when trying to fit too many components. See the original scikit-learn example here: https://scikit-learn.org/stable/auto_examples/mixture/plot_gmm.html",
    # Slider examples are numeric, not strings, to match the component type.
    examples=[
        [5, 5],
        [3, 7],
        [2, 4],
    ],
)

iface.launch()