Create new file
app.py
ADDED
@@ -0,0 +1,243 @@
# For neural networks
import keras
# For train-test splits
import sklearn.model_selection
# For random calculations
import numpy
# For help with saving and opening things
import os

# Disable eager execution (the TF1-style session below requires graph mode)
from tensorflow.python.framework.ops import disable_eager_execution
disable_eager_execution()

# Start a session for running calculations
import tensorflow as tf
sess = tf.compat.v1.Session()

from keras import backend as K
K.set_session(sess)


# Do you want it loud?
VERBOSE = 1

# This function loads all of the data
def load_data():
    # Open all the files we downloaded at the beginning and take out the good bits
    curves = numpy.load('/content/data_curves.npz')['curves']
    geometry = numpy.load('/content/data_geometry.npz')['geometry']
    constants = numpy.load('/content/constants.npz')
    S = constants['S']
    N = constants['N']
    D = constants['D']
    F = constants['F']
    G = constants['G']

    # Some of the good bits need additional processing
    new_curves = numpy.zeros((S*N, D * F))
    for i, curveset in enumerate(curves):
        new_curves[i, :] = curveset.T.flatten() / 1000000

    new_geometry = numpy.zeros((S*N, G * G * G))
    for i, geometryset in enumerate(geometry):
        new_geometry[i, :] = geometryset.T.flatten()

    # Return the good bits to the user
    return curves, geometry, S, N, D, F, G, new_curves, new_geometry

import gradio
import pandas

class Network(object):

    def __init__(self, structure, weights):
        # Instantiate variables
        self.curves = 0
        self.new_curves = 0
        self.geometry = 0
        self.new_geometry = 0
        self.S = 0
        self.N = 0
        self.D = 0
        self.F = 0
        self.G = 0

        # Load network
        with open(structure, 'r') as file:
            self.network = keras.models.model_from_json(file.read())
        self.network.load_weights(weights)

        # Load data
        self._load_data()

    def _load_data(self):
        self.curves, self.geometry, self.S, self.N, self.D, self.F, self.G, self.new_curves, self.new_geometry = load_data()

    def analysis(self, idx=None):
        print(idx)

        if idx is None:
            idx = numpy.random.randint(1, self.S * self.N)
        else:
            idx = int(idx)

        # Get the input
        data_input = self.new_geometry[idx:(idx+1), :]
        other_data_input = data_input.reshape((self.G, self.G, self.G), order='F')

        # Get the outputs
        predicted_output = self.network.predict(data_input)
        true_output = self.new_curves[idx].reshape((3, self.F))
        predicted_output = predicted_output.reshape((3, self.F))

        f = numpy.linspace(0.05, 2.0, 64)
        fd = pandas.DataFrame(f).rename(columns={0: "Frequency"})
        df_pred = pandas.DataFrame(predicted_output.transpose()).rename(columns={0: "Surge", 1: "Heave", 2: "Pitch"})
        df_true = pandas.DataFrame(true_output.transpose()).rename(columns={0: "Surge", 1: "Heave", 2: "Pitch"})

        # return idx, other_data_input, true_output, predicted_output
        return pandas.concat([fd, df_pred], axis=1), pandas.concat([fd, df_true], axis=1)

    def synthesis(self, idx=None):
        print(idx)

        if idx is None:
            idx = numpy.random.randint(1, self.S * self.N)
        else:
            idx = int(idx)

        # Get the input
        data_input = self.new_curves[idx:(idx+1), :]
        other_data_input = data_input.reshape((3, self.F))

        # Get the outputs
        predicted_output = self.network.predict(data_input)
        true_output = self.new_geometry[idx].reshape((self.G, self.G, self.G), order='F')
        predicted_output = predicted_output.reshape((self.G, self.G, self.G), order='F')

        # return idx, other_data_input, true_output, predicted_output
        return predicted_output, true_output

    def get_geometry(self, idx=None):

        if idx is None:
            idx = numpy.random.randint(1, self.S * self.N)
        else:
            idx = int(idx)

        idx = int(idx)

        # Get the input
        data_input = self.new_geometry[idx:(idx+1), :]
        other_data_input = data_input.reshape((self.G, self.G, self.G), order='F')

        # return idx, other_data_input, true_output, predicted_output
        return other_data_input


    def get_performance(self, idx=None):

        if idx is None:
            idx = numpy.random.randint(1, self.S * self.N)
        else:
            idx = int(idx)

        idx = int(idx)

        # Get the input
        data_input = self.new_curves[idx:(idx+1), :]
        other_data_input = data_input.reshape((3, self.F))

        f = numpy.linspace(0.05, 2.0, 64)
        fd = pandas.DataFrame(f).rename(columns={0: "Frequency"})
        df_pred = pandas.DataFrame(other_data_input.transpose()).rename(columns={0: "Surge", 1: "Heave", 2: "Pitch"})
        table = pandas.concat([fd, df_pred], axis=1)

        # return idx, other_data_input, true_output, predicted_output
        return table

def simple_analysis(index):
    net = Network("/content/16forward_structure.json", "/content/16forward_weights.h5")
    return net.analysis(index)

def simple_synthesis(index):
    net = Network("/content/16inverse_structure.json", "/content/16inverse_weights.h5")
    pred, true = net.synthesis(index)
    return plotly_fig(pred), plotly_fig(true)

import plotly.graph_objects as go
import numpy as np

def performance(index):
    net = Network("/content/16forward_structure.json", "/content/16forward_weights.h5")
    return net.get_performance(index)

def geometry(index):
    net = Network("/content/16forward_structure.json", "/content/16forward_weights.h5")
    values = net.get_geometry(index)
    return plotly_fig(values)


def plotly_fig(values):
    X, Y, Z = np.mgrid[0:1:32j, 0:1:32j, 0:1:32j]  # expects a 32x32x32 grid of values
    fig = go.Figure(data=go.Volume(
        x=X.flatten(),
        y=Y.flatten(),
        z=Z.flatten(),
        value=values.flatten(),
        isomin=-0.1,
        isomax=0.8,
        opacity=0.1,  # needs to be small to see through all surfaces
        surface_count=21,  # needs to be a large number for good volume rendering
        ))
    return fig

with gradio.Blocks() as analysis_demo:
    with gradio.Row():
        with gradio.Column():
            num = gradio.Number(42, label="data index")
            btn1 = gradio.Button("Select")
        with gradio.Column():
            geo = gradio.Plot(label="Geometry")

    with gradio.Row():
        btn2 = gradio.Button("Estimate Spectrum")

    with gradio.Row():
        with gradio.Column():
            pred = gradio.Timeseries(x="Frequency", y=['Surge', 'Heave', 'Pitch'], label="Predicted")

        with gradio.Column():
            true = gradio.Timeseries(x="Frequency", y=['Surge', 'Heave', 'Pitch'], label="True")

    btn1.click(fn=geometry, inputs=[num], outputs=[geo])
    btn2.click(fn=simple_analysis, inputs=[num], outputs=[pred, true])

with gradio.Blocks() as synthesis_demo:
    with gradio.Row():
        with gradio.Column():
            num = gradio.Number(42, label="data index")
            btn1 = gradio.Button("Select")
        with gradio.Column():
            perf = gradio.Timeseries(x="Frequency", y=['Surge', 'Heave', 'Pitch'], label="Performance")

    with gradio.Row():
        btn2 = gradio.Button("Synthesize Geometry")

    with gradio.Row():
        with gradio.Column():
            pred = gradio.Plot(label="Predicted")

        with gradio.Column():
            true = gradio.Plot(label="True")

    btn1.click(fn=performance, inputs=[num], outputs=[perf])
    btn2.click(fn=simple_synthesis, inputs=[num], outputs=[pred, true])

all_synthesis_demos = gradio.TabbedInterface([synthesis_demo], ["Random Spectrum from Data"])

all_analysis_demos = gradio.TabbedInterface([analysis_demo], ["Random Geometry from Data"])

demo = gradio.TabbedInterface([all_analysis_demos, all_synthesis_demos], ["Analysis", "Synthesis"])
demo.launch()