Create app.py
app.py
ADDED
@@ -0,0 +1,79 @@
+import gradio as gr
+import numpy as np
+import matplotlib.pyplot as plt
+from matplotlib.collections import LineCollection
+
+from sklearn.linear_model import LinearRegression
+from sklearn.isotonic import IsotonicRegression
+from sklearn.utils import check_random_state
+
+def visualize_isotonic_regression(n, random_range_min, random_range_max, out_of_bounds):
+    if random_range_min >= random_range_max:
+        raise ValueError("Random Value Range (Min) must be less than Random Value Range (Max)")
+
+    # Noisy observations: uniform noise on top of a monotonic log trend.
+    x = np.arange(n)
+    rs = check_random_state(0)
+    y = rs.randint(random_range_min, random_range_max, size=(n,)) + 50.0 * np.log1p(np.arange(n))
+
+    ir = IsotonicRegression(out_of_bounds=out_of_bounds if out_of_bounds else "clip")
+    y_ = ir.fit_transform(x, y)
+
+    lr = LinearRegression()
+    lr.fit(x[:, np.newaxis], y)  # x needs to be 2D for LinearRegression
+
+    # Vertical segments linking each observation to its isotonic fit.
+    segments = [[[i, y[i]], [i, y_[i]]] for i in range(n)]
+    lc = LineCollection(segments, zorder=0)
+    lc.set_array(np.ones(len(y)))
+    lc.set_linewidths(np.full(n, 0.5))
+
+    fig, (ax0, ax1) = plt.subplots(ncols=2, figsize=(12, 6))
+
+    ax0.plot(x, y, "C0.", markersize=12)
+    ax0.plot(x, y_, "C1.-", markersize=12)
+    ax0.plot(x, lr.predict(x[:, np.newaxis]), "C2-")
+    ax0.add_collection(lc)
+    ax0.legend(("Training data", "Isotonic fit", "Linear fit"), loc="lower right")
+    ax0.set_title("Isotonic regression fit on noisy data (n=%d)" % n)
+
+    # Evaluate the stepwise prediction function on a dense grid within the training range.
+    x_test = np.linspace(np.min(x), np.max(x), 1000)
+    ax1.plot(x_test, ir.predict(x_test), "C1-")
+    ax1.plot(ir.X_thresholds_, ir.y_thresholds_, "C1.", markersize=12)
+    ax1.set_title("Prediction function (%d thresholds)" % len(ir.X_thresholds_))
+
+    return fig
+
+parameters = [
+    gr.inputs.Slider(10, 100, step=10, default=50, label="Number of data points (n)"),
+    gr.inputs.Slider(-50, 50, step=1, default=-50, label="Random Value Range (Min)"),
+    gr.inputs.Slider(-50, 50, step=1, default=50, label="Random Value Range (Max)"),
+    gr.inputs.Dropdown(["clip", "nan", "raise"], default="clip", label="Out of Bounds Strategy"),
+]
+
+description = (
+    "This app illustrates isotonic regression on generated data (a non-linear monotonic "
+    "trend with homoscedastic uniform noise). Isotonic regression finds a non-decreasing "
+    "approximation of a function while minimizing the mean squared error on the training "
+    "data. The benefit of such a non-parametric model is that it assumes nothing about the "
+    "shape of the target function besides monotonicity. A linear regression is shown for "
+    "comparison. See the original scikit-learn example: "
+    "https://scikit-learn.org/stable/auto_examples/miscellaneous/plot_isotonic_regression.html"
+)
+
+examples = [
+    [50, -30, 30, "clip"],
+    [30, -20, 40, "nan"],
+    [70, -10, 20, "raise"],
+]
+
+iface = gr.Interface(
+    fn=visualize_isotonic_regression,
+    inputs=parameters,
+    outputs="plot",
+    title="Isotonic Regression Visualization",
+    description=description,
+    examples=examples,
+)
+iface.launch()
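Note: the gr.inputs.* components and default= arguments above target the Gradio 2.x API, which later Gradio releases removed. A minimal sketch of the same interface, assuming Gradio 3.x or newer (components move to the top level, default becomes value, and the "plot" shortcut can be spelled as an explicit gr.Plot component):

import gradio as gr

# Assumes visualize_isotonic_regression, description, and examples
# are defined as in app.py above.
parameters = [
    gr.Slider(10, 100, step=10, value=50, label="Number of data points (n)"),
    gr.Slider(-50, 50, step=1, value=-50, label="Random Value Range (Min)"),
    gr.Slider(-50, 50, step=1, value=50, label="Random Value Range (Max)"),
    gr.Dropdown(["clip", "nan", "raise"], value="clip", label="Out of Bounds Strategy"),
]

iface = gr.Interface(
    fn=visualize_isotonic_regression,
    inputs=parameters,
    outputs=gr.Plot(),
    title="Isotonic Regression Visualization",
    description=description,
    examples=examples,
)
iface.launch()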
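For a quick check outside Gradio, the plotting function can also be called directly and its figure saved to disk (a usage sketch; isotonic_demo.png is an arbitrary output name):

# Same arguments as the first entry in examples.
fig = visualize_isotonic_regression(n=50, random_range_min=-30, random_range_max=30, out_of_bounds="clip")
fig.savefig("isotonic_demo.png")  # writes the two-panel fit/prediction plot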