'.join(\n",
+ " '{} | '.format(''.join(str(_) for _ in row)) for row in table)\n",
+ " )\n",
+ " ))\n",
+ "\n",
+ "avg_active_cases = aggregated[\"ActiveCases\"].mean()\n",
+ "\n",
+ "aggr = aggregated.agg({\"ActiveCases\": \"sum\", \"Population\": \"sum\"}, as_index=False)\n",
+ "final_df = aggr.to_frame().T\n",
+ "final_df[\"PercentageInfected\"] = final_df.apply(lambda x: (x[\"ActiveCases\"]/x[\"Population\"]) * 100, axis=1)\n",
+ "percentage_infected = final_df[\"PercentageInfected\"].to_numpy()[0]\n",
+ "\n",
+ "display(HTML(\n",
+ " \"\"\"\n",
+ " Result:\n",
+ " \n",
+ " \n",
+ " \n",
+ " Average active cases: | \n",
+ " {0:.2f} | \n",
+ " \n",
+ " \n",
+ " Proportion of total population currently infected: | \n",
+ " {1:.2f}% | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \"\"\".format(avg_active_cases, percentage_infected))\n",
+ ")\n",
+ "\n",
+ "\n",
+ "print(\"\"\"\n",
+ "I was unsure whether the problem wanted the percentage of the population\n",
+ "affected for each country, so I have included the percentage for each country \n",
+ "as well, just in case:\n",
+ "\"\"\")\n",
+ "aggregated.head()"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 439
+ },
+ "cellView": "form",
+ "id": "lRtwSfqSAPAY",
+ "outputId": "a7037d5d-fbd6-48b3-e47b-32090720dfd1"
+ },
+ "execution_count": 87,
+ "outputs": [
+ {
+ "output_type": "display_data",
+ "data": {
+ "text/plain": [
+ ""
+ ],
+ "text/html": [
+ "\n",
+ " Result:\n",
+ " \n",
+ " \n",
+ " \n",
+ " Average active cases: | \n",
+ " 60038.20 | \n",
+ " \n",
+ " \n",
+ " Proportion of total population currently infected: | \n",
+ " 0.17% | \n",
+ " \n",
+ " \n",
+ " \n",
+ " "
+ ]
+ },
+ "metadata": {}
+ },
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "\n",
+ "I was unsure whether the problem wanted the percentage of the population\n",
+ "affected for each country, so I have included the percentage for each country \n",
+ "as well, just in case:\n",
+ "\n"
+ ]
+ },
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ " Country ActiveCases Population PercentageInfected\n",
+ "129 Martinique 222576.901869 374087.0 0.594987\n",
+ "68 Faeroe Islands 26936.998989 49233.0 0.547133\n",
+ "195 St. Barth 4854.999825 9945.0 0.488185\n",
+ "84 Guadeloupe 193026.939904 399794.0 0.482816\n",
+ "93 Iceland 130899.111498 345393.0 0.378986"
+ ],
+ "text/html": [
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " Country | \n",
+ " ActiveCases | \n",
+ " Population | \n",
+ " PercentageInfected | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " 129 | \n",
+ " Martinique | \n",
+ " 222576.901869 | \n",
+ " 374087.0 | \n",
+ " 0.594987 | \n",
+ " \n",
+ " \n",
+ " 68 | \n",
+ " Faeroe Islands | \n",
+ " 26936.998989 | \n",
+ " 49233.0 | \n",
+ " 0.547133 | \n",
+ " \n",
+ " \n",
+ " 195 | \n",
+ " St. Barth | \n",
+ " 4854.999825 | \n",
+ " 9945.0 | \n",
+ " 0.488185 | \n",
+ " \n",
+ " \n",
+ " 84 | \n",
+ " Guadeloupe | \n",
+ " 193026.939904 | \n",
+ " 399794.0 | \n",
+ " 0.482816 | \n",
+ " \n",
+ " \n",
+ " 93 | \n",
+ " Iceland | \n",
+ " 130899.111498 | \n",
+ " 345393.0 | \n",
+ " 0.378986 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " "
+ ]
+ },
+ "metadata": {},
+ "execution_count": 87
+ }
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/training/MSML-602-Final-Project-Final-Version.ipynb b/training/src/MSML-602-Final-Project-Final-Version.ipynb
similarity index 100%
rename from training/MSML-602-Final-Project-Final-Version.ipynb
rename to training/src/MSML-602-Final-Project-Final-Version.ipynb
diff --git a/training/MSML-602-Final-Project.ipynb b/training/src/MSML-602-Final-Project.ipynb
similarity index 100%
rename from training/MSML-602-Final-Project.ipynb
rename to training/src/MSML-602-Final-Project.ipynb
diff --git a/training/downloader.py b/training/src/downloader.py
similarity index 100%
rename from training/downloader.py
rename to training/src/downloader.py
diff --git a/web/.DS_Store b/web/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..118b7cef83a8c160b967ea3632dd5706d0c96202
Binary files /dev/null and b/web/.DS_Store differ
diff --git a/web/.dockerignore b/web/.dockerignore
new file mode 100644
index 0000000000000000000000000000000000000000..85dcc16df69a9860507592c89f31f438c8fe7b41
--- /dev/null
+++ b/web/.dockerignore
@@ -0,0 +1,2 @@
+.git
+node_modules
diff --git a/web/.gitignore b/web/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..cdb93cd5ebbb0b464b7cec99bbc254d23aac6815
--- /dev/null
+++ b/web/.gitignore
@@ -0,0 +1 @@
+.python-version
diff --git a/web/Dockerfile.backend b/web/Dockerfile.backend
new file mode 100644
index 0000000000000000000000000000000000000000..56d7c3abb0d3191c96928f570bdfb087bac9706c
--- /dev/null
+++ b/web/Dockerfile.backend
@@ -0,0 +1,11 @@
+FROM tensorflow/tensorflow:2.11.0
+
+WORKDIR /app
+
+COPY ./requirements.txt /app/requirements.txt
+
+RUN pip install -r requirements.txt
+
+COPY ./src /app
+
+CMD ["python3", "-m", "flask", "run", "--host=0.0.0.0"]
diff --git a/web/Dockerfile.frontend b/web/Dockerfile.frontend
new file mode 100644
index 0000000000000000000000000000000000000000..3cf56759b56f7255cad33f93dc2184cdbf6ebee1
--- /dev/null
+++ b/web/Dockerfile.frontend
@@ -0,0 +1,13 @@
+FROM node:18-alpine3.15
+
+WORKDIR /app
+
+COPY package.json ./
+
+COPY package-lock.json ./
+
+RUN npm update && npm install
+
+COPY . .
+
+CMD ["npm", "start"]
diff --git a/web/Makefile b/web/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..11e44fa440eb4dda3a019b2331a822715c045732
--- /dev/null
+++ b/web/Makefile
@@ -0,0 +1,19 @@
+.PHONY: build-backend
+build-backend: ## Build backend services
+ docker build -f Dockerfile.backend -t pred-backend ./backend
+
+.PHONY: build-frontend
+build-frontend: ## Build frontend services
+ docker build -f Dockerfile.frontend -t pred-frontend ./frontend
+
+.PHONY: up-prod
+up-prod: ## Build backend and frontend images, then start the stack with production compose overrides
+ docker build -f Dockerfile.backend -t pred-backend ./backend
+ docker build -f Dockerfile.frontend -t pred-frontend ./frontend
+ docker-compose -f docker-compose.yml -f docker-compose.prod.yml up -d
+
+.PHONY: up-dev
+up-dev: ## Build backend and frontend images, then start the stack with the development compose file
+ docker build -f Dockerfile.backend -t pred-backend ./backend
+ docker build -f Dockerfile.frontend -t pred-frontend ./frontend
+ docker-compose up -d
diff --git a/web/README.md b/web/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..101b6c4e2510bc9a88cc7aeb0522db4d81347118
--- /dev/null
+++ b/web/README.md
@@ -0,0 +1 @@
+MSML 602 Assignments/Projects
diff --git a/web/backend/.DS_Store b/web/backend/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..43b0834167ba7ae34323804f8857283a145c62e0
Binary files /dev/null and b/web/backend/.DS_Store differ
diff --git a/web/backend/.dockerignore b/web/backend/.dockerignore
new file mode 100644
index 0000000000000000000000000000000000000000..6b8710a711f3b689885aa5c26c6c06bde348e82b
--- /dev/null
+++ b/web/backend/.dockerignore
@@ -0,0 +1 @@
+.git
diff --git a/web/backend/.gitignore b/web/backend/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..d35ef6f723159707e9228f1fd58cf9bc7f91595d
--- /dev/null
+++ b/web/backend/.gitignore
@@ -0,0 +1,2 @@
+.python-version
+__pycache__
diff --git a/web/backend/README.md b/web/backend/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..101b6c4e2510bc9a88cc7aeb0522db4d81347118
--- /dev/null
+++ b/web/backend/README.md
@@ -0,0 +1 @@
+MSML 602 Assignments/Projects
diff --git a/web/backend/requirements.txt b/web/backend/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..76d1d6414d1e33a91ca898a596b3a7f51ff49915
--- /dev/null
+++ b/web/backend/requirements.txt
@@ -0,0 +1,7 @@
+flask==2.2.2
+flask-cors==3.0.10
+pandas==1.5.0
+numpy==1.23.3
+scipy==1.9.1
+scikit-learn==1.1.2
+requests-cache==0.9.7
diff --git a/web/backend/src/.DS_Store b/web/backend/src/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..1d8f5849d5dca4c4e7d71dc165035cefeec86a14
Binary files /dev/null and b/web/backend/src/.DS_Store differ
diff --git a/web/backend/src/app.py b/web/backend/src/app.py
new file mode 100644
index 0000000000000000000000000000000000000000..9db5907ccf7313e144fe23c24ac998a3e463b915
--- /dev/null
+++ b/web/backend/src/app.py
@@ -0,0 +1,244 @@
+from flask import Flask, request
+import tensorflow as tf
+from datetime import datetime, timedelta
+import logging
+import requests
+import requests_cache
+import pandas as pd
+import json
+import numpy as np
+import pickle
+import math
+import pytz
+
+from flask_cors import CORS, cross_origin
+
+
+session = requests_cache.CachedSession('requests-cache')
+
+app = Flask(__name__)
+cors = CORS(app)
+app.config['CORS_HEADERS'] = 'Content-Type'
+
+app.logger.setLevel(logging.INFO)
+DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
+API_KEY = "e1f10a1e78da46f5b10a1e78da96f525"
+BASE_URL = "https://api.weather.com/v1/location/KDCA:9:US/observations/historical.json?apiKey={api_key}&units=e&startDate={start_date}&endDate={end_date}"
+model = tf.keras.models.load_model('/app/model', compile=False)
+
+scaler = pickle.load(open('./model/scaler.pkl','rb'))
+cols_to_scale = ["pressure", "wspd","heat_index","dewPt", "rh", "vis", "wc", "wdir_degree", "clds_ordinal",
+ "day_sin", "day_cos", "year_sin", "year_cos", "wdir_sin", "wdir_cos"]
+
+def get_NaN_counts(df):
+ nan_counts = df.isna().sum()
+ return pd.concat([nan_counts, ((nan_counts/len(df))*100).round(2)],
+ axis=1,
+ keys=["NaN count", "Percentage"])
+
+def clds_to_ordinal(row):
+ mapping = {
+ "SKC": 0,
+ "CLR": 0,
+ "FEW": 1,
+ "SCT": 2,
+ "BKN": 3,
+ "OVC": 4,
+ "VV": 5
+ }
+ clds = row["clds"]
+ if pd.isnull(clds):
+ return np.NaN
+ return mapping[clds]
+
+def clean_wspd(row):
+ if row["wdir_cardinal"] == "CALM":
+ return 0
+ return row["wspd"]
+
+def restrict_wspd(row):
+ if row["wspd"] < 0:
+ return 0
+ return row["wspd"]
+
+def restrict_rh(row):
+ if row["rh"] < 0:
+ return 0
+ if row["rh"] > 100:
+ return 100
+ return row["rh"]
+
+def clean_wdir(row):
+ if row["wdir_cardinal"] == "CALM":
+ return 0
+ return row["wdir"]
+
+def wdir_cardinal_to_deg(row):
+ wdir = row["wdir"]
+ if not pd.isnull(wdir):
+ return wdir
+ cardinal_directions = {
+ 'N': 0,
+ 'NNE': 22.5,
+ 'NE': 45,
+ 'ENE': 67.5,
+ 'E': 90,
+ 'ESE': 112.5,
+ 'SE': 135,
+ 'SSE': 157.5,
+ 'S': 180,
+ 'SSW': 202.5,
+ 'SW': 225,
+ 'WSW': 247.5,
+ 'W': 270,
+ 'WNW': 292.5,
+ 'NW': 315,
+ 'NNW': 337.5,
+ 'CALM': 0,
+ 'VAR': -1
+ }
+ wdir_cardinal = row["wdir_cardinal"]
+
+ return cardinal_directions[wdir_cardinal] if wdir_cardinal in cardinal_directions else np.NaN
+
+def prepare_dataframe(_df, start_timestamp, end_timestamp):
+ dates_df = pd.DataFrame()
+ dates_df["obs_timestamp"] = pd.date_range(start_timestamp, end_timestamp, freq="H")
+
+ _df = dates_df.merge(_df, how='left', on='obs_timestamp')
+ _df = _df.astype(
+ {
+ 'temp': 'float',
+ 'pressure': 'float',
+ 'wspd': 'float',
+ 'heat_index': 'float'
+ },
+ )
+
+ _df["wdir_cardinal"].fillna(method="bfill", inplace=True)
+ _df["wdir_degree"] = _df.apply(wdir_cardinal_to_deg, axis=1)
+ _df["clds_ordinal"] = _df.apply(clds_to_ordinal, axis=1)
+ _df["temp"].interpolate("polynomial", order=2, inplace=True)
+ _df["pressure"].interpolate("polynomial", order=2, inplace=True)
+ _df["heat_index"].interpolate("polynomial", order=2, inplace=True)
+ _df["wdir"].fillna(method="bfill", inplace=True)
+ _df["wdir"] = _df.apply(clean_wdir, axis=1)
+ _df["wspd"] = _df.apply(clean_wspd, axis=1)
+ _df["wspd"].interpolate("polynomial", order=2, inplace=True)
+ _df["wspd"] = _df.apply(restrict_wspd, axis=1)
+ _df["clds"].fillna(method="bfill", inplace=True)
+ _df["clds_ordinal"].interpolate("linear", inplace=True)
+ _df["dewPt"].interpolate("polynomial", order=2, inplace=True)
+ _df["rh"].interpolate("polynomial", order=2, inplace=True)
+ _df["rh"] = _df.apply(restrict_rh, axis=1)
+ _df["wc"].interpolate("polynomial", order=2, inplace=True)
+ _df["vis"].fillna(method="bfill", inplace=True)
+ _df.drop(["wdir", "wdir_cardinal", "clds"], axis=1, inplace=True)
+
+ _df = _df.dropna()
+
+ _df = _df.sort_values(by=['obs_timestamp'])
+ date_time = _df.pop('obs_timestamp')
+ timestamp_s = date_time.map(pd.Timestamp.timestamp)
+ day = 24*60*60
+ year = (365.2425)*day
+
+ _df['day_sin'] = np.sin(timestamp_s * (2 * np.pi / day))
+ _df['day_cos'] = np.cos(timestamp_s * (2 * np.pi / day))
+ _df['year_sin'] = np.sin(timestamp_s * (2 * np.pi / year))
+ _df['year_cos'] = np.cos(timestamp_s * (2 * np.pi / year))
+ _df['wdir_sin'] = np.sin(_df["wdir_degree"])
+ _df['wdir_cos'] = np.cos(_df["wdir_degree"])
+
+ return _df, date_time
+
+
+def map_data_to_dataframe(data, target_date):
+ end_timestamp = target_date - timedelta(minutes=8)
+ start_timestamp = end_timestamp - timedelta(days=8) + timedelta(hours=1)
+
+ df = pd.read_json(json.dumps(data))
+ df["obs_timestamp"] = df.apply(lambda x: datetime.fromtimestamp(x["valid_time_gmt"]).strftime(DATE_FORMAT), axis=1)
+ df = df.astype({'obs_timestamp': 'datetime64[ns]'})
+ initial_cols = ["temp", "obs_timestamp", "pressure", "wspd", "heat_index", "dewPt", "rh", "vis", "wc", "wdir", "wdir_cardinal", "clds" ]
+ df = df[initial_cols]
+
+ df, _ = prepare_dataframe(df, start_timestamp.strftime(DATE_FORMAT), end_timestamp.strftime(DATE_FORMAT))
+ return df
+
+
+def map_to_timestamp(predictions, target_date):
+ start = target_date + timedelta(hours=1)
+ end = start + timedelta(hours=23)
+ target_hours = [x.to_pydatetime().strftime(DATE_FORMAT) for x in pd.date_range(start, end, freq="H")]
+ return { h: predictions[idx] for idx, h in enumerate(target_hours)}
+
+def predict(df):
+ predict_df = df[-168:]
+ predict_df_features = predict_df[cols_to_scale]
+ predict_df_features = scaler.transform(predict_df_features.values)
+ predict_df[cols_to_scale] = predict_df_features
+ predictions = model(predict_df.to_numpy().reshape(1, 168, 16))
+ return predictions
+
+def predict_for_date(target_date):
+ date_format = "%Y%m%d"
+ start_date = target_date - timedelta(days=9)
+ res = session.get(BASE_URL.format(api_key=API_KEY, start_date=start_date.strftime(date_format), end_date=target_date.strftime(date_format)))
+ data = res.json()
+ df = map_data_to_dataframe(data["observations"], target_date)
+ predictions = predict(df)
+ flattened = list(map(lambda x: math.floor(x), predictions.numpy().flatten().tolist()))
+ return map_to_timestamp(flattened, target_date)
+
+def get_actual_temperatures(target_date):
+ date_format = "%Y%m%d"
+    start_date = target_date - timedelta(days=1)  # Widen the window by a day on each side because the API reports timestamps in UTC
+ end_date = target_date + timedelta(days=1)
+ start_date_str = (start_date - timedelta(days=1)).strftime(date_format)
+ end_date_str = end_date.strftime(date_format)
+ today = datetime.today().astimezone(pytz.timezone("America/New_York")).date()
+ req_url = BASE_URL.format(api_key=API_KEY, start_date=start_date_str, end_date=end_date_str)
+ if target_date.date() < today:
+ res = session.get(req_url)
+ else:
+ res = requests.get(req_url)
+ start_timestamp = target_date + timedelta(minutes=52)
+ end_timestamp = end_date + timedelta(days=1) - timedelta(minutes=8)
+
+
+ data = res.json()
+ df = pd.read_json(json.dumps(data["observations"]))
+ df["obs_timestamp"] = df.apply(lambda x: datetime.fromtimestamp(x["valid_time_gmt"]).astimezone(pytz.timezone("America/New_York")).strftime(DATE_FORMAT), axis=1)
+ df = df.astype({'obs_timestamp': 'datetime64[ns]'})
+ initial_cols = ["temp", "obs_timestamp"]
+ df = df[initial_cols]
+ dates_df = pd.DataFrame()
+ dates_df["obs_timestamp"] = pd.date_range(start_timestamp, end_timestamp, freq="H")
+ df = dates_df.merge(df, how='left', on='obs_timestamp')
+
+ df["obs_timestamp"] = df.apply(lambda x: (x["obs_timestamp"] + timedelta(minutes=8)).strftime(DATE_FORMAT), axis=1)
+ dicts = df.to_dict("records")
+ reduced = { k["obs_timestamp"]: k["temp"] for k in dicts}
+ for k in reduced:
+ if np.isnan(reduced[k]):
+ reduced[k] = None
+ return reduced
+
+@app.route("/predictions")
+@cross_origin()
+def get_predictions():
+ today = datetime.today().astimezone(pytz.timezone("America/New_York")).date()
+ target_date = datetime.strptime(request.args["target_date"], "%Y-%m-%d")
+ app.logger.info(today)
+ app.logger.info(target_date)
+ # target_dates = list(filter(lambda x: x < today, [x.to_pydatetime() for x in pd.date_range(start_date, end_date, freq="D").to_list()]))
+ predictions = predict_for_date(target_date)
+ actual_temp = get_actual_temperatures(target_date) if target_date.date() <= today else None
+
+ merged = { k: {"predicted": predictions[k], "actual": actual_temp[k] if actual_temp else None} for k in predictions}
+ response = app.response_class(response=json.dumps(merged),
+ status=200,
+ mimetype='application/json')
+ return response
+
diff --git a/web/backend/src/model/keras_metadata.pb b/web/backend/src/model/keras_metadata.pb
new file mode 100644
index 0000000000000000000000000000000000000000..3c430558dd2991b086a220a96537f6cc10450c6c
--- /dev/null
+++ b/web/backend/src/model/keras_metadata.pb
@@ -0,0 +1,8 @@
+
+�%root"_tf_keras_sequential*�%{"name": "sequential", "trainable": true, "expects_training_arg": true, "dtype": "float32", "batch_input_shape": null, "must_restore_from_config": false, "class_name": "Sequential", "config": {"name": "sequential", "layers": [{"class_name": "InputLayer", "config": {"batch_input_shape": {"class_name": "__tuple__", "items": [null, 168, 16]}, "dtype": "float32", "sparse": false, "ragged": false, "name": "lstm_input"}}, {"class_name": "LSTM", "config": {"name": "lstm", "trainable": true, "dtype": "float32", "return_sequences": false, "return_state": false, "go_backwards": false, "stateful": false, "unroll": false, "time_major": false, "units": 12, "activation": "tanh", "recurrent_activation": "sigmoid", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 1}, "recurrent_initializer": {"class_name": "Orthogonal", "config": {"gain": 1.0, "seed": null}, "shared_object_id": 2}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 3}, "unit_forget_bias": true, "kernel_regularizer": null, "recurrent_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "recurrent_constraint": null, "bias_constraint": null, "dropout": 0.0, "recurrent_dropout": 0.0, "implementation": 2}}, {"class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 24, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "Zeros", "config": {}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Reshape", "config": {"name": "reshape", "trainable": true, "dtype": "float32", "target_shape": {"class_name": "__tuple__", "items": [24, 1]}}}]}, "shared_object_id": 10, "build_input_shape": {"class_name": "TensorShape", "items": 
[null, 168, 16]}, "is_graph_network": true, "full_save_spec": {"class_name": "__tuple__", "items": [[{"class_name": "TypeSpec", "type_spec": "tf.TensorSpec", "serialized": [{"class_name": "TensorShape", "items": [null, 168, 16]}, "float32", "lstm_input"]}], {}]}, "save_spec": {"class_name": "TypeSpec", "type_spec": "tf.TensorSpec", "serialized": [{"class_name": "TensorShape", "items": [null, 168, 16]}, "float32", "lstm_input"]}, "keras_version": "2.9.0", "backend": "tensorflow", "model_config": {"class_name": "Sequential", "config": {"name": "sequential", "layers": [{"class_name": "InputLayer", "config": {"batch_input_shape": {"class_name": "__tuple__", "items": [null, 168, 16]}, "dtype": "float32", "sparse": false, "ragged": false, "name": "lstm_input"}, "shared_object_id": 0}, {"class_name": "LSTM", "config": {"name": "lstm", "trainable": true, "dtype": "float32", "return_sequences": false, "return_state": false, "go_backwards": false, "stateful": false, "unroll": false, "time_major": false, "units": 12, "activation": "tanh", "recurrent_activation": "sigmoid", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 1}, "recurrent_initializer": {"class_name": "Orthogonal", "config": {"gain": 1.0, "seed": null}, "shared_object_id": 2}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 3}, "unit_forget_bias": true, "kernel_regularizer": null, "recurrent_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "recurrent_constraint": null, "bias_constraint": null, "dropout": 0.0, "recurrent_dropout": 0.0, "implementation": 2}, "shared_object_id": 5}, {"class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 24, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 6}, "bias_initializer": {"class_name": "Zeros", "config": {}, 
"shared_object_id": 7}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 8}, {"class_name": "Reshape", "config": {"name": "reshape", "trainable": true, "dtype": "float32", "target_shape": {"class_name": "__tuple__", "items": [24, 1]}}, "shared_object_id": 9}]}}, "training_config": {"loss": {"class_name": "MeanSquaredError", "config": {"reduction": "auto", "name": "mean_squared_error"}, "shared_object_id": 11}, "metrics": null, "weighted_metrics": null, "loss_weights": null, "optimizer_config": {"class_name": "Adam", "config": {"name": "Adam", "learning_rate": 0.0010000000474974513, "decay": 0.0, "beta_1": 0.8999999761581421, "beta_2": 0.9990000128746033, "epsilon": 1e-07, "amsgrad": false}}}}2
+�root.layer_with_weights-0"_tf_keras_rnn_layer*�
+{"name": "lstm", "trainable": true, "expects_training_arg": true, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "LSTM", "config": {"name": "lstm", "trainable": true, "dtype": "float32", "return_sequences": false, "return_state": false, "go_backwards": false, "stateful": false, "unroll": false, "time_major": false, "units": 12, "activation": "tanh", "recurrent_activation": "sigmoid", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 1}, "recurrent_initializer": {"class_name": "Orthogonal", "config": {"gain": 1.0, "seed": null}, "shared_object_id": 2}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 3}, "unit_forget_bias": true, "kernel_regularizer": null, "recurrent_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "recurrent_constraint": null, "bias_constraint": null, "dropout": 0.0, "recurrent_dropout": 0.0, "implementation": 2}, "shared_object_id": 5, "input_spec": [{"class_name": "InputSpec", "config": {"dtype": null, "shape": {"class_name": "__tuple__", "items": [null, null, 16]}, "ndim": 3, "max_ndim": null, "min_ndim": null, "axes": {}}, "shared_object_id": 12}], "build_input_shape": {"class_name": "TensorShape", "items": [null, 168, 16]}}2
+�root.layer_with_weights-1"_tf_keras_layer*�{"name": "dense", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 24, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 6}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 7}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 8, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 2, "axes": {"-1": 12}}, "shared_object_id": 13}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 12]}}2
+�root.layer-2"_tf_keras_layer*�{"name": "reshape", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Reshape", "config": {"name": "reshape", "trainable": true, "dtype": "float32", "target_shape": {"class_name": "__tuple__", "items": [24, 1]}}, "shared_object_id": 9}2
+�root.layer_with_weights-0.cell"_tf_keras_layer*�{"name": "lstm_cell", "trainable": true, "expects_training_arg": true, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "LSTMCell", "config": {"name": "lstm_cell", "trainable": true, "dtype": "float32", "units": 12, "activation": "tanh", "recurrent_activation": "sigmoid", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 1}, "recurrent_initializer": {"class_name": "Orthogonal", "config": {"gain": 1.0, "seed": null}, "shared_object_id": 2}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 3}, "unit_forget_bias": true, "kernel_regularizer": null, "recurrent_regularizer": null, "bias_regularizer": null, "kernel_constraint": null, "recurrent_constraint": null, "bias_constraint": null, "dropout": 0.0, "recurrent_dropout": 0.0, "implementation": 2}, "shared_object_id": 4}2
+�^root.keras_api.metrics.0"_tf_keras_metric*�{"class_name": "Mean", "name": "loss", "dtype": "float32", "config": {"name": "loss", "dtype": "float32"}, "shared_object_id": 14}2
\ No newline at end of file
diff --git a/web/backend/src/model/saved_model.pb b/web/backend/src/model/saved_model.pb
new file mode 100644
index 0000000000000000000000000000000000000000..61914fa8cd611145798d1b1f4baa5b9577abb0de
Binary files /dev/null and b/web/backend/src/model/saved_model.pb differ
diff --git a/web/backend/src/model/scaler.pkl b/web/backend/src/model/scaler.pkl
new file mode 100644
index 0000000000000000000000000000000000000000..c3e37aeb139bc6625373d9d05e0985c23fa86fa9
Binary files /dev/null and b/web/backend/src/model/scaler.pkl differ
diff --git a/web/backend/src/model/variables/variables.data-00000-of-00001 b/web/backend/src/model/variables/variables.data-00000-of-00001
new file mode 100644
index 0000000000000000000000000000000000000000..14393b7ed4f3ddf02717455e8dc8c3c844bc8c54
Binary files /dev/null and b/web/backend/src/model/variables/variables.data-00000-of-00001 differ
diff --git a/web/backend/src/model/variables/variables.index b/web/backend/src/model/variables/variables.index
new file mode 100644
index 0000000000000000000000000000000000000000..9e2b3a5eadf5bc1f8b227ec9c791f21c9113cc6b
Binary files /dev/null and b/web/backend/src/model/variables/variables.index differ
diff --git a/web/docker-compose.prod.yml b/web/docker-compose.prod.yml
new file mode 100644
index 0000000000000000000000000000000000000000..4e621ccbe6ce8d213015a942036aea9242904e48
--- /dev/null
+++ b/web/docker-compose.prod.yml
@@ -0,0 +1,10 @@
+version: "3.5"
+services:
+ pred-backend:
+ ports:
+ - "81:5000"
+
+ pred-frontend:
+ command: npm start
+ ports:
+ - "80:4200"
diff --git a/web/docker-compose.yml b/web/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..199afb5c286940b6c7835a3397b530db20fc518d
--- /dev/null
+++ b/web/docker-compose.yml
@@ -0,0 +1,34 @@
+version: "3.5"
+services:
+ pred-backend:
+ image: pred-backend:latest
+ ports:
+ - "3001:5000"
+ volumes:
+ - ./backend/src:/app
+ networks:
+ - prediction-project
+ logging:
+ driver: "json-file"
+ options:
+ max-size: "10m"
+
+ pred-frontend:
+ image: pred-frontend:latest
+ command: npm run start-dev
+ ports:
+ - "4200:4200"
+ volumes:
+ - ./frontend/src:/app/src
+ networks:
+ - prediction-project
+ logging:
+ driver: "json-file"
+ options:
+ max-size: "10m"
+
+volumes:
+ db-volume:
+
+networks:
+ prediction-project:
diff --git a/web/frontend/.browserslistrc b/web/frontend/.browserslistrc
new file mode 100644
index 0000000000000000000000000000000000000000..4f9ac26980c156a3d525267010d5f78144b43519
--- /dev/null
+++ b/web/frontend/.browserslistrc
@@ -0,0 +1,16 @@
+# This file is used by the build system to adjust CSS and JS output to support the specified browsers below.
+# For additional information regarding the format and rule options, please see:
+# https://github.com/browserslist/browserslist#queries
+
+# For the full list of supported browsers by the Angular framework, please see:
+# https://angular.io/guide/browser-support
+
+# You can see what browsers were selected by your queries by running:
+# npx browserslist
+
+last 1 Chrome version
+last 1 Firefox version
+last 2 Edge major versions
+last 2 Safari major versions
+last 2 iOS major versions
+Firefox ESR
diff --git a/web/frontend/.dockerignore b/web/frontend/.dockerignore
new file mode 100644
index 0000000000000000000000000000000000000000..70d5723693800f5cd32dd0db6d4d9d9ebfd4cba3
--- /dev/null
+++ b/web/frontend/.dockerignore
@@ -0,0 +1,3 @@
+node_modules
+.github
+.angular
diff --git a/web/frontend/.editorconfig b/web/frontend/.editorconfig
new file mode 100644
index 0000000000000000000000000000000000000000..59d9a3a3e73ffc640517ef488f6f89d6270195d1
--- /dev/null
+++ b/web/frontend/.editorconfig
@@ -0,0 +1,16 @@
+# Editor configuration, see https://editorconfig.org
+root = true
+
+[*]
+charset = utf-8
+indent_style = space
+indent_size = 2
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[*.ts]
+quote_type = single
+
+[*.md]
+max_line_length = off
+trim_trailing_whitespace = false
diff --git a/web/frontend/.github/CODE_OF_CONDUCT.md b/web/frontend/.github/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000000000000000000000000000000000..64da87772ba970899e7b8ad3ee10be7bb7851b11
--- /dev/null
+++ b/web/frontend/.github/CODE_OF_CONDUCT.md
@@ -0,0 +1,46 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [INSERT EMAIL ADDRESS]. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
\ No newline at end of file
diff --git a/web/frontend/.github/COMMIT_CONVENTION.md b/web/frontend/.github/COMMIT_CONVENTION.md
new file mode 100644
index 0000000000000000000000000000000000000000..261c7ca20e4595661f530350ce622794137f9a33
--- /dev/null
+++ b/web/frontend/.github/COMMIT_CONVENTION.md
@@ -0,0 +1,83 @@
+## Git Commit Message Convention
+
+> This is adapted from [Angular's commit convention](https://github.com/conventional-changelog/conventional-changelog/blob/master/packages/conventional-changelog-angular/convention.md).
+
+#### Examples
+
+Appears under "Features" header, `compiler` subheader:
+
+```
+feat(compiler): add 'comments' option
+```
+
+Appears under "Bug Fixes" header, `sidebar` subheader, with a link to issue #28:
+
+```
+fix(sidebar): handle events on blur
+
+close #28
+```
+
+Appears under "Performance Improvements" header, and under "Breaking Changes" with the breaking change explanation:
+
+```
+perf(core): improve vdom diffing by removing 'foo' option
+
+BREAKING CHANGE: The 'foo' option has been removed.
+```
+
+The following commit and commit `667ecc1` do not appear in the changelog if they are under the same release. If not, the revert commit appears under the "Reverts" header.
+
+```
+revert: feat(compiler): add 'comments' option
+
+This reverts commit 667ecc1654a317a13331b17617d973392f415f02.
+```
+
+### Full Message Format
+
+A commit message consists of a **header**, **body** and **footer**. The header has a **type**, **scope** and **subject**:
+
+```
+():
+
+
+
+ |