nesticot committed
Commit ae30f51 · 1 Parent(s): 3f2f598

Update app.py

Files changed (1)
  1. app.py +355 -253
app.py CHANGED
@@ -1,29 +1,48 @@
- print('Running')
- import time
  import requests
  import pandas as pd
  import seaborn as sns
  import matplotlib.pyplot as plt
  from matplotlib.pyplot import figure
  from matplotlib.offsetbox import OffsetImage, AnnotationBbox
- from scipy import stats
  import matplotlib.lines as mlines
  import matplotlib.transforms as mtransforms
  import numpy as np
- import time
  #import plotly.express as px
  #!pip install chart_studio
- #import chart_studio.tools as tls
- from bs4 import BeautifulSoup
  import matplotlib.pyplot as plt
  import numpy as np
  import matplotlib.font_manager as font_manager
  from datetime import datetime
  import pytz
- from matplotlib.ticker import MaxNLocator
- from matplotlib.patches import Ellipse
- import matplotlib.transforms as transforms
- from matplotlib.gridspec import GridSpec
  datetime.now(pytz.timezone('US/Pacific')).strftime('%B %d, %Y')
  # Configure Notebook
  #%matplotlib inline
@@ -36,50 +55,119 @@ warnings.filterwarnings('ignore')
  # import yahoo_oauth
  import json
  #import openpyxl
- from sklearn import preprocessing
- from datetime import timedelta
- import dataframe_image as dfi
- # from google.colab import drive
- def percentile(n):
-     def percentile_(x):
-         return np.percentile(x, n)
-     percentile_.__name__ = 'percentile_%s' % n
-     return percentile_
-
- import os
- import praw
- import matplotlib.pyplot as plt
- import matplotlib.colors
- import matplotlib.colors as mcolors
- cmap_sum = matplotlib.colors.LinearSegmentedColormap.from_list("", ["#4285f4","#FFFFFF","#F0E442"])
- #import pybaseball
- import math
- import matplotlib.ticker as mtick
- import matplotlib.ticker as ticker
-
- colour_palette = ['#FFB000','#648FFF','#785EF0',
-                   '#DC267F','#FE6100','#3D1EB2','#894D80','#16AA02','#B5592B','#A3C1ED']
- import matplotlib.colors as mcolors
- from matplotlib.ticker import FuncFormatter
  from matplotlib.font_manager import FontProperties

- import numpy as np
- import matplotlib.pyplot as plt
- import matplotlib.colors

- #x,y,c = zip(*np.random.rand(30,3)*4-2)
- #norm=plt.Normalize(-2,2)
- co = matplotlib.colors.LinearSegmentedColormap.from_list("", ["#ffffff","#F0E442"])

- try:
-     data_r = requests.get("https://pub-api-ro.fantasysports.yahoo.com/fantasy/v2/league/427.l.public;out=settings/players;position=ALL;start=0;count=3000;sort=rank_season;search=;out=percent_owned;out=auction_values,ranks;ranks=season;ranks_by_position=season;out=expert_ranks;expert_ranks.rank_type=projected_season_remaining/draft_analysis;cut_types=diamond;slices=last7days?format=json_f").json()
-     key_check = data_r['fantasy_content']['league']['players']

- except KeyError:
-     data_r = requests.get("https://pub-api-ro.fantasysports.yahoo.com/fantasy/v2/league/427.l.public;out=settings/players;position=ALL;start=0;count=1151;sort=rank_season;search=;out=percent_owned;out=auction_values,ranks;ranks=season;ranks_by_position=season;out=expert_ranks;expert_ranks.rank_type=projected_season_remaining/draft_analysis;cut_types=diamond;slices=last7days?format=json_f").json()
-     print('key_checked')

  total_list = []

@@ -101,266 +189,278 @@ for x in data_r['fantasy_content']['league']['players']:
  single_list.append(0)
  total_list.append(single_list)

- yahoo_df = pd.DataFrame(total_list,columns = ['player_id','rank_value','full','first','last','average_pick','average_auction_cost','display_position','editorial_team_abbr','percent_owned'])
- yahoo_df_2 = yahoo_df.copy()
-
- # Write your code here.
- response = requests.get("https://www.naturalstattrick.com/playerlist.php?fromseason=20232024&thruseason=20232024&stype=2&sit=all&stdoi=oi&rate=n")
- soup = BeautifulSoup(response.text, 'html.parser')
- table_rows = soup.findAll('tr')
- table_rows = table_rows[1:-1]
- table_rows[0].findAll('td')
-
- player_name = []
- player_position = []
- player_team = []
- player_id = []
-
- for i in range(0,len(table_rows)-1):
-     player_name.append(str(table_rows[i].findAll('td')[0].contents[0]))
-     player_position.append(table_rows[i].findAll('td')[1].contents[0])
-     player_team.append(table_rows[i].findAll('td')[2].contents[0])
-     player_id.append(str(table_rows[i].findAll('td')[3].contents[0])[-76:][:7])
-
- player_id_df = pd.DataFrame({'Player':player_name,'Player ID':player_id,'Position':player_position,'Team':player_team})
- #player_id_df.index.name = 'Player Name'
- player_id_df.head()
-
- skater_df = player_id_df[player_id_df['Position'] != 'G']
- goalie_df = player_id_df[player_id_df['Position'] == 'G']
-
- season = 20232024
- seasontype = 2

- def nat_stat_trick_range_pp_gp(rookie='n',start_date='2022-10-01',end_date=str(pd.to_datetime(datetime.now(pytz.timezone('US/Pacific')).strftime('%Y-%m-%d')).date()),sit='all',gp=1):
-     url = f'https://www.naturalstattrick.com/playerteams.php?fromseason={season}&thruseason={season}&stype={seasontype}&sit=pp&score=all&stdoi=std&rate=y&team=ALL&pos=S&loc=B&toi=0&gpfilt=gpteam&fd=&td=&tgp='+str(gp)+'&lines=single&draftteam=ALL'
-
-     player_list_all = []
-     response = requests.get(url)
-     soup = BeautifulSoup(response.text, 'html.parser')
-     table_rows = soup.findAll('tr')
-     table_rows = table_rows[1:]
-
-     for j in range(0,len(table_rows)):
-         p_string = [str(x).strip('<td>').strip('</') for x in list(table_rows[j].findAll('td')) if "<td>" in str(x)]
-         player_list_all.append([p_string[0]]+[str(table_rows[j].findAll('td')[1]).split('>')[2].split('<')[0]]+p_string[1:]+[str(table_rows[j].findAll('td')[1])[98:105].strip('</a></td>')])
-     #table_rows[0].findAll('td')
-
-     if soup != "":
-         columns_list = [str(x).split('>')[1].split('<')[0] for x in soup.findAll('th')]+['player_id']
-         df_url = pd.DataFrame(data=player_list_all,columns=columns_list)
-
-         df_url = df_url.fillna(0)
-         df_url['Shots+Hits+Blocks/60'] = df_url['Shots/60'].astype(float)+df_url['Hits/60'].astype(float)+df_url['Shots Blocked/60'].astype(float)
-         df_url['Shots+Hits/60'] = df_url['Shots/60'].astype(float)+df_url['Hits/60'].astype(float)
-     #print(url)
-     return df_url
-
- team_abv = pd.read_csv('team_abv.csv')
- team_dict = team_abv.set_index('team_abv').to_dict()
-
- yahoo_nhl_df = pd.read_csv('yahoo_to_nhl.csv', encoding='unicode_escape')
-
- def nat_stat_convert(df):
-     for i in range(0,len(df.columns)):
-         if df.columns[i][-3:]=='/60':
-             if 'ix' not in df.columns[i]:
-                 df[df.columns[i]] = np.round(df[df.columns[i]].astype(float)*df['TOI'].astype(float)/60,0)
-                 df = df.rename(columns={df.columns[i]: df.columns[i].replace('/60','')})
-             else:
-                 df[df.columns[i]] = df[df.columns[i]].astype(float)*df['TOI'].astype(float)/60
-                 df = df.rename(columns={df.columns[i]: df.columns[i].replace('/60','')})

-     df['Faceoffs %'] = df['Faceoffs Won']/(df['Faceoffs Won']+df['Faceoffs Lost'])

-     return df

- from shiny import ui, render, App
- import matplotlib.image as mpimg
  app_ui = ui.page_fluid(
-     #ui.panel_title("Simulate a normal distribution"),
-
      ui.layout_sidebar(
-
          ui.panel_sidebar(
-             #ui.input_date_range("date_range_id", "Date range input",start = statcast_df.game_date.min(), end = statcast_df.game_date.max()),
-             ui.input_select("team_id", "Select Team",team_dict,width=1,size=1,selected='ANA'),
-             ui.input_numeric("n_1", "Last Games x", value=1),
-             ui.input_numeric("n_2", "Last Games y", value=0),
-             ui.input_numeric("n_3", "Last Games z", value=0),
-             ui.input_numeric("top_n", "Show top 'n'", value=10),
-         ),
-
          ui.panel_main(ui.tags.h3(""),
              ui.div({"style": "font-size:2em;"},ui.output_text("txt_title")),
              #ui.tags.h2("Fantasy Hockey Schedule Summary"),
-             ui.tags.h5("Created By: @TJStats, Data: Natural Stat Trick, Yahoo Fantasy"),
              ui.div({"style": "font-size:1.2em;"},ui.output_text("txt")),
-             ui.output_table("pp_roundup"),
-             #ui.tags.h5('Legend'),
-             #ui.tags.h6('An Off Night is defined as a day in which less than half the teams in the NHL are playing'),
-             #ui.tags.h6('The scores are determined by using games played, off-nights, B2B, and strength of opponents') )
-         )
-     ),
- )

  from urllib.request import Request, urlopen
- from shiny import App, reactive, ui
- from shiny.ui import h2, tags
  # importing OpenCV(cv2) module

- #print(app_ui)
  def server(input, output, session):

-
      @output
      @render.text
      def txt():
-         return f'{input.team_id()} Last Games PP Summary'
-
      @output
      @render.text
      def txt_title():
-
-         return f'Team Last Games PP% Leaders'

-     @output
-     @render.table
-     def pp_roundup():

-         top_n = input.top_n()
-         n_1 = input.n_1()
-         n_2 = input.n_2()
-         n_3 = input.n_3()

-         list_of_columns = ['Player', 'Team', 'display_position','percent_owned','L'+str(n_1)+' PP TOI','L'+str(n_2)+' PP TOI','L'+str(n_3)+' PP TOI',
-                            'L'+str(n_1)+' PP%','L'+str(n_2)+' PP%','L'+str(n_3)+' PP%']

-         list_of_columns_name = ['Player', 'Team', 'Position','Roster%','L'+str(n_1)+' PP TOI','L'+str(n_2)+' PP TOI','L'+str(n_3)+' PP TOI',
-                                 'L'+str(n_1)+' PP%','L'+str(n_2)+' PP%','L'+str(n_3)+' PP%']

-         if type(n_1) is not int:
-             n_1 = 1
-
-         if n_2 == 0:
-             list_of_columns.remove(f'L{str(n_2)} PP TOI')
-             list_of_columns.remove(f'L{str(n_2)} PP%')
-             list_of_columns_name.remove(f'L{str(n_2)} PP TOI')
-             list_of_columns_name.remove(f'L{str(n_2)} PP%')

-         if n_3 == 0:
-             list_of_columns.remove(f'L{str(n_3)} PP TOI')
-             list_of_columns.remove(f'L{str(n_3)} PP%')
-             list_of_columns_name.remove(f'L{str(n_3)} PP TOI')
-             list_of_columns_name.remove(f'L{str(n_3)} PP%')

-         start_date ='2023-09-01'
-         end_date = '2024-05-01'

-         df_pp_1 = nat_stat_trick_range_pp_gp(rookie='n',start_date = start_date,end_date = end_date, sit='pp',gp=n_1)
-         df_pp_2 = nat_stat_trick_range_pp_gp(rookie='n',start_date = start_date,end_date = end_date, sit='pp',gp=n_2)
-         df_pp_3 = nat_stat_trick_range_pp_gp(rookie='n',start_date = start_date,end_date = end_date, sit='pp',gp=n_3)

-         df_all_pp_1 = nat_stat_convert(df_pp_1)
-         df_all_pp_2 = nat_stat_convert(df_pp_2)
-         df_all_pp_3 = nat_stat_convert(df_pp_3)

-         df_final = df_all_pp_1.merge( df_all_pp_2,how='outer',left_on=['player_id'],right_on=['player_id'],suffixes=("","_2"))
-         df_final = df_final.merge( df_all_pp_3,how='outer',left_on=['player_id'],right_on=['player_id'],suffixes=("_1","_3"))

-         team_report_1 = pd.read_html(f'https://www.naturalstattrick.com/teamtable.php?fromseason={season}&thruseason={season}&stype={seasontype}&sit=pp&score=all&rate=n&team=all&loc=B&gpf=c&gp='+str(n_1)+'&fd=&td=')[0]
-         team_report_2 = pd.read_html(f'https://www.naturalstattrick.com/teamtable.php?fromseason={season}&thruseason={season}&stype={seasontype}&sit=pp&score=all&rate=n&team=all&loc=B&gpf=c&gp='+str(n_2)+'&fd=&td=')[0]
-         team_report_3 = pd.read_html(f'https://www.naturalstattrick.com/teamtable.php?fromseason=fromseason={season}&thruseason={season}&stype={seasontype}&sit=pp&score=all&rate=n&team=all&loc=B&gpf=c&gp='+str(n_3)+'&fd=&td=')[0]
-         team_report_1 = team_report_1.merge(team_abv,left_on=['Team'],right_on=['team_name'],how='left')
-         team_report_2 = team_report_2.merge(team_abv,left_on=['Team'],right_on=['team_name'],how='left')
-         team_report_3 = team_report_3.merge(team_abv,left_on=['Team'],right_on=['team_name'],how='left')

-         test = df_final[['player_id','Player_1','Team_1','Position_1','TOI_1','TOI_2','TOI_3',]]
-         test.columns = ['player_id','Player','Team','Position','TOI_1','TOI_2','TOI_3',]
-         test = test.merge(team_report_1[['TOI','team_abv']],how='left',left_on=['Team'],right_on=['team_abv'], suffixes=('','_1_team'))
-         test = test.merge(team_report_2[['TOI','team_abv']],how='left',left_on=['Team'],right_on=['team_abv'], suffixes=('','_2_team'))
-         test = test.merge(team_report_3[['TOI','team_abv']],how='left',left_on=['Team'],right_on=['team_abv'], suffixes=('','_3_team'))
-         test = test.fillna('0:00')

-         test['TOI'] = [int(x[:-3])+int(x[-2:])*100/60/100 for x in test['TOI'] ]
-         test['TOI_1'] = [x if x!='0:00' else 0 for x in test['TOI_1']]
-         test['TOI_2'] = [x if x!='0:00' else 0 for x in test['TOI_2']]
-         test['TOI_3'] = [x if x!='0:00' else 0 for x in test['TOI_3']]

-         test['TOI_2_team'] = [int(x[:-3])+int(x[-2:])*100/60/100 for x in test['TOI_2_team'] ]
-         test['TOI_3_team'] = [int(x[:-3])+int(x[-2:])*100/60/100 for x in test['TOI_3_team']]

-         test['TOI'] = test['TOI'].astype(float)

-         test['PP%_1'] = test['TOI_1'].astype(float)/ test['TOI'].astype(float)
-         test['PP%_2'] = test['TOI_2'].astype(float)/ test['TOI_2_team'].astype(float)
-         test['PP%_3'] = test['TOI_3'].astype(float)/ test['TOI_3_team'].astype(float)
-         test = test.fillna(0)
-         test['TOI_1'] = ["%d:%02d" % (int(x),(x*60)%60) for x in test['TOI_1'].astype(float)]
-         test['TOI_2'] = ["%d:%02d" % (int(x),(x*60)%60) for x in test['TOI_2'].astype(float)]
-         test['TOI_3'] = ["%d:%02d" % (int(x),(x*60)%60) for x in test['TOI_3'].astype(float)]
-         test = test.drop(['team_abv','team_abv_2_team','team_abv_3_team','TOI','TOI_2_team','TOI_3_team'],axis=1)
-         test.columns = ['player_id','Player','Team','Position','L'+str(n_1)+' PP TOI','L'+str(n_2)+' PP TOI','L'+str(n_3)+' PP TOI','L'+str(n_1)+' PP%','L'+str(n_2)+' PP%','L'+str(n_3)+' PP%']

-         yahoo_df = yahoo_df_2.merge(yahoo_nhl_df,left_on = 'player_id',right_on='player_id_yahoo',suffixes=['','_y'])
-         yahoo_df.nhl_id = yahoo_df.nhl_id.astype(float)
-         test.player_id = test.player_id.astype(float)

-         test = test.merge(right=yahoo_df,left_on='player_id',right_on='nhl_id',suffixes=['','_y'],how='left')

-         print('Column List')
-         print(test.columns)

-         print(list_of_columns)
-         test = test[list_of_columns]
-         test = test.rename(columns={'percent_owned':'Roster%'})
-
-         top_d_score = test[(test.Team==input.team_id())].sort_values(by=['L'+str(n_1)+' PP%'],ascending=False).reset_index(drop=True)
-         top_d_score = top_d_score.head(min(len(top_d_score),top_n))

-         #top_d_score.columns = list_of_columns_name

-         cols = top_d_score.columns.tolist();

-         top_d_score['Deployment'] = "PP2"
-         top_d_score['Deployment'][0:5] = "PP1"

-         # df_style_bang = top_d_score.head(10).style.background_gradient(cmap=co, subset=['L'+str(n_1)+' PP%','L'+str(n_2)+' PP%','L'+str(n_3)+' PP%','Roster%']).hide_index().set_properties(**{'Height': '12px'},**{'text-align': 'center'}).set_table_styles([{
-         # 'selector': 'caption',
-         # 'props': [
-         # ('color', ''),
-         # ('fontname', 'Century Gothic'),
-         # ('font-size', '20px'),
-         # ('font-style', 'italic'),
-         # ('font-weight', ''),
-         # ('text-align', 'centre'),
-         # ]

-         # },{'selector' :'th', 'props':[('text-align', 'center'),('Height','5px')]},{'selector' :'td', 'props':[('text-align', 'center'),('font-size', '13px'),('fontname', 'Century Gothic')]}]).format(
-         # {'L'+str(n_1)+' PP%': '{:.0%}',
-         # 'L'+str(n_2)+' PP%': '{:.0%}',
-         # 'L'+str(n_3)+' PP%': '{:.0%}',
-         # 'Roster%': '{:.0%}',
-         # },)

-         df_style_bang = top_d_score.head(input.top_n()).style.background_gradient(cmap=co,vmin=0,vmax=1, subset=[x for x in cols if x.endswith('PP%')]).set_properties(**{'border': '3 px'},overwrite=False).set_table_styles([{
  'selector': 'caption',
  'props': [
  ('color', ''),
@@ -371,24 +471,26 @@ def server(input, output, session):
  ('text-align', 'centre'),
  ]

- },{'selector' :'th', 'props':[('text-align', 'center'),('Height','px'),('color','black'),('border', '1px black solid !important')]},{'selector' :'td', 'props':[('text-align', 'center'),('font-size', '18px'),('color','black')]}],overwrite=False).set_properties(
- **{'background-color':'White','index':'White','min-width':'75px'},overwrite=False).set_table_styles(
  [{'selector': 'th:first-child', 'props': [('background-color', 'white')]}],overwrite=False).set_table_styles(
  [{'selector': 'tr:first-child', 'props': [('background-color', 'white')]}],overwrite=False).set_table_styles(
  [{'selector': 'tr', 'props': [('line-height', '20px')]}],overwrite=False).set_properties(
- **{'Height': '8px'},**{'text-align': 'center'},overwrite=False).hide_index().format(
- {'L'+str(n_1)+' PP%': '{:.0%}',
-  'L'+str(n_2)+' PP%': '{:.0%}',
-  'L'+str(n_3)+' PP%': '{:.0%}',
-  'Roster%': '{:.0%}',
-  },)
-
-         return df_style_bang

- # test = test.fillna(0)
- #test['PP TOI'] = ["%d:%02d" % (int(x),(x*60)%60) if x>0 else '0:00' for x in test['PP TOI']]

  import requests
  import pandas as pd
  import seaborn as sns
  import matplotlib.pyplot as plt
  from matplotlib.pyplot import figure
  from matplotlib.offsetbox import OffsetImage, AnnotationBbox
+ #from scipy import stats
  import matplotlib.lines as mlines
  import matplotlib.transforms as mtransforms
  import numpy as np
  #import plotly.express as px
  #!pip install chart_studio
+ # import chart_studio.tools as tls
+ #from bs4 import BeautifulSoup
  import matplotlib.pyplot as plt
  import numpy as np
  import matplotlib.font_manager as font_manager
  from datetime import datetime
  import pytz
+ from datetime import date
+ datetime.now(pytz.timezone('US/Pacific')).strftime('%B %d, %Y')
+ # Configure Notebook
+ #%matplotlib inline
+ plt.style.use('fivethirtyeight')
+ sns.set_context("notebook")
+ import warnings
+ warnings.filterwarnings('ignore')
+ #from urllib.request import urlopen
+ import json
+ from datetime import date, timedelta
+ #import dataframe_image as dfi
+ #from os import listdir
+ #from os.path import isfile, join
+ import datetime
+ import seaborn as sns
+ import os
+ import calendar
+ #from IPython.display import display, HTML
+ import matplotlib.image as mpimg
+ #from skimage import io
+ #import difflib
+
+
+ from datetime import datetime
+ import pytz
  datetime.now(pytz.timezone('US/Pacific')).strftime('%B %d, %Y')
  # Configure Notebook
  #%matplotlib inline

  # import yahoo_oauth
  import json
  #import openpyxl
+ #from sklearn import preprocessing
+ from PIL import Image
+ import logging
+ import matplotlib.patches as patches
+ from matplotlib.patches import Rectangle
  from matplotlib.font_manager import FontProperties
+ from matplotlib.offsetbox import OffsetImage, AnnotationBbox

+ import requests
+ #import pickle
+ import pandas as pd
+
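+ # Pull the full 2023-24 NHL schedule as JSON from the NHL stats API and flatten it into per-game lists.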
+ # # Loop over the counter and format the API call
+ r = requests.get('https://statsapi.web.nhl.com/api/v1/schedule?startDate=2023-10-01&endDate=2024-06-01')
+ schedule = r.json()
+
+ def flatten(t):
+     return [item for sublist in t for item in sublist]
+
+ game_id = flatten([[x['gamePk'] for x in schedule['dates'][y]['games']] for y in range(0,len(schedule['dates']))])
+ game_date = flatten([[x['gameDate'] for x in schedule['dates'][y]['games']] for y in range(0,len(schedule['dates']))])
+ game_home = flatten([[x['teams']['home']['team']['name'] for x in schedule['dates'][y]['games']] for y in range(0,len(schedule['dates']))])
+ game_away = flatten([[x['teams']['away']['team']['name'] for x in schedule['dates'][y]['games']] for y in range(0,len(schedule['dates']))])
+
+ schedule_df = pd.DataFrame(data={'game_id': game_id, 'game_date' : game_date, 'game_home' : game_home, 'game_away' : game_away})
+ schedule_df.game_date = pd.to_datetime(schedule_df['game_date']).dt.tz_convert(tz='US/Eastern').dt.date
+ schedule_df = schedule_df.replace('Montréal Canadiens','Montreal Canadiens')
+ schedule_df.head()
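+ # Load the team abbreviation lookup and the Yahoo fantasy week calendar from local CSVs, plus cell-highlighting helpers and colormaps for the styled table.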
+ team_abv = pd.read_csv('team_abv.csv')
+ yahoo_weeks = pd.read_csv('yahoo_weeks.csv')
+ #yahoo_weeks['Number'] = yahoo_weeks['Number'].astype(int)
+ yahoo_weeks['Start'] = pd.to_datetime(yahoo_weeks['Start'])
+ yahoo_weeks['End'] = pd.to_datetime(yahoo_weeks['End'])
+ yahoo_weeks.head(5)
+
+ def highlight_cols(s):
+     color = '#C2FEE9'
+     return 'background-color: %s' % color
+ def highlight_cells(val):
+     color = 'white' if val == ' ' else ''
+     return 'background-color: {}'.format(color)
+
+ import matplotlib.pyplot as plt
+ import matplotlib.colors
+ cmap_total = matplotlib.colors.LinearSegmentedColormap.from_list("", ["#56B4E9","#FFFFFF","#F0E442"])
+ cmap_off = matplotlib.colors.LinearSegmentedColormap.from_list("", ["#FFFFFF","#F0E442"])
+ cmap_back = matplotlib.colors.LinearSegmentedColormap.from_list("", ["#FFFFFF","#56B4E9"])
+ cmap_sum = matplotlib.colors.LinearSegmentedColormap.from_list("", ["#FFFFFF","#F0E442"])
+
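+ # Attach team abbreviations to each scheduled game and tag home ('vs') and away ('@') symbols.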
+ schedule_df = schedule_df.merge(right=team_abv,left_on='game_away',right_on='team_name',how='inner',suffixes=['','_away'])
+ schedule_df = schedule_df.merge(right=team_abv,left_on='game_home',right_on='team_name',how='inner',suffixes=['','_home'])
+ schedule_df['away_sym'] = '@'
+ schedule_df['home_sym'] = 'vs'
+
+
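+ # Scrape current standings from hockey-reference and turn goals for/against per game into the GF/GA rank adjustments used later in the schedule score.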
+ #if not os.path.isfile('standings/standings_'+str(date.today())+'.csv'):
+ standings_df_old = pd.read_html('https://www.hockey-reference.com/leagues/NHL_2023_standings.html')[0].append(pd.read_html('https://www.hockey-reference.com/leagues/NHL_2023_standings.html')[1])
+ # standings_df_old.to_csv('standings/standings_'+str(date.today())+'.csv')
+ #standings_df_old = pd.read_csv('standings/standings_'+str(date.today())+'.csv',index_col=[0])
+
+ standings_df = standings_df_old[standings_df_old['Unnamed: 0'].str[-8:] != 'Division'].sort_values('Unnamed: 0').reset_index(drop=True).rename(columns={'Unnamed: 0':'Team'})#.drop(columns='Unnamed: 0')
+ #standings_df = standings_df.replace('St. Louis Blues','St Louis Blues')
+ standings_df['GF/GP'] = standings_df['GF'].astype(int)/standings_df['GP'].astype(int)
+ standings_df['GA/GP'] = standings_df['GA'].astype(int)/standings_df['GP'].astype(int)
+ standings_df['GF_Rank'] = standings_df['GF/GP'].rank(ascending=True,method='first')/10-1.65
+ standings_df['GA_Rank'] = standings_df['GA/GP'].rank(ascending=False,method='first')/10-1.65
+ standings_df.Team = standings_df.Team.str.strip('*')
+ standings_df = standings_df.merge(right=team_abv,left_on='Team',right_on='team_name')
+
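+ # Stack the schedule so every game appears once per team (a home row and an away row), joined with the opponent-strength ranks.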
+ schedule_stack = pd.DataFrame()
+ schedule_stack['date'] = pd.to_datetime(list(schedule_df['game_date'])+list(schedule_df['game_date']))
+ schedule_stack['team'] = list(schedule_df['team_name'])+list(schedule_df['team_name_home'])
+ schedule_stack['team_abv'] = list(schedule_df['team_abv'])+list(schedule_df['team_abv_home'])
+ schedule_stack['symbol'] = list(schedule_df['away_sym'])+list(schedule_df['home_sym'])
+ schedule_stack['team_opponent'] = list(schedule_df['team_name_home'])+list(schedule_df['team_name'])
+ schedule_stack['team_abv_home'] = list(schedule_df['team_abv_home'])+list(schedule_df['team_abv'])
+ schedule_stack = schedule_stack.merge(right=standings_df[['team_abv','GF_Rank']],left_on='team_abv',right_on='team_abv',how='inner',suffixes=("",'_y'))
+ schedule_stack = schedule_stack.merge(right=standings_df[['team_abv','GA_Rank']],left_on='team_abv_home',right_on='team_abv',how='inner',suffixes=("",'_y'))
+
+ schedule_stack = schedule_stack.merge(right=standings_df[['team_abv','GF_Rank']],left_on='team_abv',right_on='team_abv',how='inner',suffixes=("",'_y'))
+ schedule_stack = schedule_stack.merge(right=standings_df[['team_abv','GA_Rank']],left_on='team_abv_home',right_on='team_abv',how='inner',suffixes=("",'_y'))
+
+
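+ # Flag back-to-backs (a game on consecutive days for the same team) and off-nights (days when fewer than 15 teams play).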
+ list_o = schedule_stack.sort_values(['team','date'],ascending=[True,True]).reset_index(drop=True)
+ new_list = [x - y for x, y in zip(list_o['date'][1:], list_o['date'])]
+ b2b_list = [0] + [x.days for x in new_list]
+ b2b_list = [1 if x==1 else 0 for x in b2b_list]
+ test = list(schedule_stack.groupby(by='date').count()['team'])
+ offnight = [1 if x<15 else 0 for x in test]
+ offnight_df = pd.DataFrame({'date':schedule_stack.sort_values('date').date.unique(),'offnight':offnight}).sort_values('date').reset_index(drop=True)
+ schedule_stack = schedule_stack.merge(right=offnight_df,left_on='date',right_on='date',how='right')
+ schedule_stack = schedule_stack.sort_values(['team','date'],ascending=[True,True]).reset_index(drop=True)
+ schedule_stack['b2b'] = b2b_list
+
+ schedule_stack.date = pd.to_datetime(schedule_stack.date)
+
+ away_b2b = []
+ home_b2b = []
+ for i in range(0,len(schedule_stack)):
+     away_b2b.append(schedule_stack[(schedule_stack.date[i]==schedule_stack.date)&(schedule_stack.team_opponent[i]==schedule_stack.team)].reset_index(drop=True)['b2b'][0])
+     home_b2b.append(schedule_stack[(schedule_stack.date[i]==schedule_stack.date)&(schedule_stack.team[i]==schedule_stack.team)].reset_index(drop=True)['b2b'][0])
+
+ schedule_stack['away_b2b'] = away_b2b
+ schedule_stack['home_b2b'] = home_b2b
+
+ schedule_stack['away_b2b'] = schedule_stack['away_b2b'].replace(1,' &#128564;')
+ schedule_stack['away_b2b'] = schedule_stack['away_b2b'].replace(0,'')
+ schedule_stack.head()
+
+ FontProperties(fname='/System/Library/Fonts/Apple Color Emoji.ttc')
+
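+ # Fetch Yahoo fantasy player data (season rank, draft cost, percent owned) from the public Yahoo Fantasy API.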
+ data_r = requests.get("https://pub-api-ro.fantasysports.yahoo.com/fantasy/v2/league/427.l.public;out=settings/players;position=ALL;start=0;count=3000;sort=rank_season;search=;out=percent_owned;out=auction_values,ranks;ranks=season;ranks_by_position=season;out=expert_ranks;expert_ranks.rank_type=projected_season_remaining/draft_analysis;cut_types=diamond;slices=last7days?format=json_f").json()

  total_list = []

  single_list.append(0)
  total_list.append(single_list)

+ df_2023 = pd.DataFrame(data=total_list,columns=['player_id','rank_value','full','first','last','average_pick', 'average_cost','display_position','editorial_team_abbr','percent_owned'])

+ week_dict = yahoo_weeks.set_index('Number')['Week'].sort_index().to_dict()

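+ # Shiny for Python UI: a sidebar with week, sort, and date-range controls plus a main panel for the schedule table and legend.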
+ from shiny import ui, render, App
+ import matplotlib.image as mpimg
+ # app_ui = ui.page_fluid(

+ # # ui.output_plot("plot"),
+ # #ui.h2('MLB Batter Launch Angle vs Exit Velocity'),
+ # ui.layout_sidebar(
+ # ui.panel_sidebar(
+ # ui.input_select("id", "Select Batter",batter_dict),

+ # ui.input_select("plot_id", "Select Plot",{'scatter':'Scatter Plot','dist':'Distribution Plot'})))
+ # ,

+ # ui.panel_main(ui.output_plot("plot",height = "750px",width="1250px")),
+ # #ui.download_button('test','Download'),
+ # )
+ import shinyswatch
  app_ui = ui.page_fluid(
+     #shinyswatch.theme.cosmo(),
      ui.layout_sidebar(
+
+         # Available themes:
+         #  cerulean, cosmo, cyborg, darkly, flatly, journal, litera, lumen, lux,
+         #  materia, minty, morph, pulse, quartz, sandstone, simplex, sketchy, slate,
+         #  solar, spacelab, superhero, united, vapor, yeti, zephyr
+
          ui.panel_sidebar(
+             ui.input_select("week_id", "Select Week (Set as Season for Custom Date Range)",week_dict,width=1),
+             ui.input_select("sort_id", "Sort Column",['Score','Team','Total','Off-Night','B2B'],width=1),
+             ui.input_switch("a_d_id", "Ascending?"),
+             #ui.input_select("date_id", "Select Date",yahoo_weeks['Week'],width=1),
+             ui.input_date_range("date_range_id", "Date range input",start = datetime.today().date(), end = datetime.today().date() + timedelta(days=6)),
+             ui.output_table("result"),width=3),
+
+
          ui.panel_main(ui.tags.h3(""),
              ui.div({"style": "font-size:2em;"},ui.output_text("txt_title")),
              #ui.tags.h2("Fantasy Hockey Schedule Summary"),
+             ui.tags.h5("Created By: @TJStats, Data: NHL"),
              ui.div({"style": "font-size:1.2em;"},ui.output_text("txt")),
+             ui.output_table("schedule_result"),
+             ui.tags.h5('Legend'),
+             ui.output_table("schedule_result_legend"),
+             ui.tags.h6('An Off Night is defined as a day in which less than half the teams in the NHL are playing'),
+             ui.tags.h6('The scores are determined by using games played, off-nights, B2B, and strength of opponents') )
+
+     ))
+ # ui.row(
+ # ui.column(
+ # 3,
+ # ui.input_date("x", "Date input"),),
+ # ui.column(
+ # 1,
+ # ui.input_select("level_id", "Select Level",level_dict,width=1)),
+ # ui.column(
+ # 3,
+ # ui.input_select("stat_id", "Select Stat",plot_dict_small,width=1)),
+ # ui.column(
+ # 2,
+ # ui.input_numeric("n", "Rolling Window Size", value=50)),
+ # ),
+ # ui.output_table("result_batters")),

+ # ui.nav(
+ # "Pitchers",

+ # ui.row(
+ # ui.column(
+ # 3,
+ # ui.input_select("id_pitch", "Select Pitcher",pitcher_dict,width=1,selected=675911),
+ # ),
+ # ui.column(
+ # 1,
+ # ui.input_select("level_id_pitch", "Select Level",level_dict,width=1)),
+ # ui.column(
+ # 3,
+ # ui.input_select("stat_id_pitch", "Select Stat",plot_dict_small_pitch,width=1)),
+ # ui.column(
+ # 2,
+ # ui.input_numeric("n_pitch", "Rolling Window Size", value=50)),
+ # ),
+ # ui.output_table("result_pitchers")),
+ # )
+ # )
+ # )


  from urllib.request import Request, urlopen

  # importing OpenCV(cv2) module

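+ # Server callbacks: reactive text for the title and date range, and tables for the week list, schedule grid, and legend.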
  def server(input, output, session):

      @output
      @render.text
      def txt():
+
+         week_set = int(input.week_id())
+         if week_set != 0:
+             if pd.to_datetime(yahoo_weeks[yahoo_weeks.Number == week_set]['Start'].values[0]).year != pd.to_datetime(yahoo_weeks[yahoo_weeks.Number == week_set]['End'].values[0]).year:
+
+                 return f'{pd.to_datetime(yahoo_weeks[yahoo_weeks.Number == week_set]["Start"].values[0]).strftime("%B %d, %Y")} to {pd.to_datetime(yahoo_weeks[yahoo_weeks.Number == week_set]["End"].values[0]).strftime("%B %d, %Y")}'
+             else:
+                 if pd.to_datetime(yahoo_weeks[yahoo_weeks.Number == week_set]["Start"].values[0]).month != pd.to_datetime(yahoo_weeks[yahoo_weeks.Number == week_set]["End"].values[0]).month:
+                     return f'{pd.to_datetime(yahoo_weeks[yahoo_weeks.Number == week_set]["Start"].values[0]).strftime("%B %d")} to {pd.to_datetime(yahoo_weeks[yahoo_weeks.Number == week_set]["End"].values[0]).strftime("%B %d, %Y")}'
+                 else:
+                     return f'{pd.to_datetime(yahoo_weeks[yahoo_weeks.Number == week_set]["Start"].values[0]).strftime("%B %d")} to {pd.to_datetime(yahoo_weeks[yahoo_weeks.Number == week_set]["End"].values[0]).strftime("%d, %Y")}'
+         else:
+             if input.date_range_id()[0].year != input.date_range_id()[1].year:
+
+                 return f'{input.date_range_id()[0].strftime("%B %d, %Y")} to {input.date_range_id()[1].strftime("%B %d, %Y")}'
+             else:
+                 if input.date_range_id()[0].month != input.date_range_id()[1].month:
+                     return f'{input.date_range_id()[0].strftime("%B %d")} to {input.date_range_id()[1].strftime("%B %d, %Y")}'
+                 else:
+                     return f'{input.date_range_id()[0].strftime("%B %d")} to {input.date_range_id()[1].strftime("%d, %Y")}'
+
+
      @output
      @render.text
      def txt_title():
+         week_set = int(input.week_id())
+         if week_set != 0:
+             return f'Fantasy Hockey Schedule Summary - Yahoo - Week {input.week_id()}'
+         else:
+             return f'Fantasy Hockey Schedule Summary'

+     @output
+     @render.table
+     def result():
+         #print(yahoo_weeks)
+         return yahoo_weeks[['Week','Start','End']]

+     @output
+     @render.table
+     def schedule_result():
+
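+         # Resolve the reporting window: the selected Yahoo week, or the custom date range when "Season" (week 0) is chosen.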
+         week_set = int(input.week_id())
+         print(week_set)

+         if week_set == 0:
+             start_point = input.date_range_id()[0]
+             end_point = input.date_range_id()[1]
+         else:
+             start_point = yahoo_weeks[yahoo_weeks.Number==week_set].reset_index(drop=True)['Start'][0]
+             end_point = yahoo_weeks[yahoo_weeks.Number==week_set].reset_index(drop=True)['End'][0]

+         sort_value='Score'
+         ascend=False

+         weekly_stack = schedule_stack[(schedule_stack['date'].dt.date>=start_point)&(schedule_stack['date'].dt.date<=end_point)]
+         date_list = pd.date_range(start_point,end_point,freq='d')
+         test_list = [[]] * len(date_list)

+         for i in range(0,len(date_list)):
+             test_list[i] = team_abv.merge(right=weekly_stack[weekly_stack['date']==date_list[i]],left_on='team_abv',right_on='team_abv',how='left')
+             test_list[i] = test_list[i].fillna("")
+             test_list[i]['new_text'] = test_list[i]['symbol'] + ' '+ test_list[i]['team_abv_home'] + test_list[i]['away_b2b']

+         test_df = pd.DataFrame()
+         test_df['Team'] = list(team_abv['team_abv'])
+         test_df['Total'] = test_df.merge(right=weekly_stack.groupby('team_abv')['team_abv'].apply(lambda x: x[x != ''].count()),left_on=['Team'],right_index=True,how='left').fillna(0)['team_abv']
+         test_df['Off-Night'] = test_df.merge(right=weekly_stack.groupby('team_abv').sum()['offnight'],left_on=['Team'],right_index=True,how='left').fillna(0)['offnight']
+         test_df['B2B']= test_df.merge(right=weekly_stack.groupby('team_abv').sum()['b2b'],left_on=['Team'],right_index=True,how='left').fillna(0)['b2b']

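+         # Opponent-strength inputs and the weighted schedule score: total games + 0.5*off-nights - 0.2*own back-to-backs + 0.3*games vs. tired opponents + 0.1*GF rank + 0.1*GA rank.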
+         gf_rank = np.array(test_df.merge(right=weekly_stack.groupby('team_abv').mean()['GF_Rank'],left_on=['Team'],right_index=True,how='left').fillna(0)['GF_Rank'])
+         ga_rank = np.array(test_df.merge(right=weekly_stack.groupby('team_abv').mean()['GA_Rank'],left_on=['Team'],right_index=True,how='left').fillna(0)['GA_Rank'])

+         #games_vs_tired = np.array([float(i)*0.4 for i in list(weekly_stack.groupby('team_abv')['away_b2b'].apply(lambda x: x[x != ''].count()))])

+         games_vs_tired = 0.4*np.array(test_df.merge(right=weekly_stack.groupby('team_abv')['away_b2b'].apply(lambda x: x[x != ''].count()),left_on=['Team'],right_index=True,how='left').fillna(0)['away_b2b'])

+         team_score = test_df['Total']+test_df['Off-Night']*0.5+test_df['B2B']*-0.2+games_vs_tired*0.3+gf_rank*0.1+ga_rank*0.1

+         test_df['Score'] = team_score

+         cols = test_df.columns.tolist();
+         L = len(cols)
+         test_df = test_df[cols[4:]+cols[0:4]]
+         #return test_df#[cols[4:]+cols[0:4]]

+         test_df = test_df.sort_values(by=[sort_value,'Score'],ascending = ascend)

+         for i in range(0,len(date_list)):
+             test_df[calendar.day_name[date_list[i].weekday()]+'<br>'+str(date_list[i].month)+'-'+'{:02d}'.format(date_list[i].day)] = test_list[i]['new_text']

+         row = ['']*L
+         for x in test_df[test_df.columns[L:]]:
+             row.append(int(sum(test_df[x]!=" ")/2))

+         test_df = test_df.sort_values(by=input.sort_id(),ascending=input.a_d_id())

+         test_df.loc[32] = row
+         #test_df_html = HTML( test_df.to_html().replace("\\n","<br>") )
+         offnight_list = [True if x <8 else False for x in test_df.iloc[-1][L:]]

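+         # Style the output: highlight off-night columns, colour-code Score/Total/Off-Night/B2B, and format the numeric columns.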
+         test_df.style.applymap(highlight_cols,subset = ((list(test_df.index[:-1]),test_df.columns[L:][offnight_list])))
+         test_df_style = test_df.style.set_properties(**{'border': '3 px'},overwrite=False).set_table_styles([{
+             'selector': 'caption',
+             'props': [
+                 ('color', ''),
+                 ('fontname', 'Century Gothic'),
+                 ('font-size', '20px'),
+                 ('font-style', 'italic'),
+                 ('font-weight', ''),
+                 ('text-align', 'centre'),
+             ]

+         },{'selector' :'th', 'props':[('text-align', 'center'),('Height','px'),('color','black'),('border', '1px black solid !important')]},{'selector' :'td', 'props':[('text-align', 'center'),('font-size', '18px'),('color','black')]}],overwrite=False).set_properties(
+             **{'background-color':'White','index':'White','min-width':'75px'},overwrite=False).set_properties(
+             **{'background-color':'White','index':'White','min-width':'100px'},overwrite=False,subset = ((list(test_df.index[:]),test_df.columns[5:]))).set_table_styles(
+             [{'selector': 'th:first-child', 'props': [('background-color', 'white')]}],overwrite=False).set_table_styles(
+             [{'selector': 'tr:first-child', 'props': [('background-color', 'white')]}],overwrite=False).set_table_styles(
+             [{'selector': 'tr', 'props': [('line-height', '20px')]}],overwrite=False).set_properties(
+             **{'Height': '8px'},**{'text-align': 'center'},overwrite=False).hide_index()
+
+         test_df_style = test_df_style.applymap(highlight_cols,subset = ((list(test_df.index[:-1]),test_df.columns[L:][offnight_list])))

+         test_df_style = test_df_style.applymap(highlight_cells)
+         test_df_style = test_df_style.background_gradient(cmap=cmap_total,subset = ((list(test_df.index[:-1]),test_df.columns[0])))
+         test_df_style = test_df_style.background_gradient(cmap=cmap_total,vmin=0,vmax=np.max(test_df.Total[:len(test_df)-1]),subset = ((list(test_df.index[:-1]),test_df.columns[2])))
+         test_df_style = test_df_style.background_gradient(cmap=cmap_off,subset = ((list(test_df.index[:-1]),test_df.columns[3])))
+         test_df_style = test_df_style.background_gradient(cmap=cmap_back,subset = ((list(test_df.index[:-1]),test_df.columns[4])))
+         test_df_style = test_df_style.background_gradient(cmap=cmap_sum,subset = ((list(test_df.index[-1:]),test_df.columns[L:])),axis=1)
+         test_df_style = test_df_style.set_properties(
+             **{'border': '1px black solid !important'},subset = ((list(test_df.index[:-1]),test_df.columns[:]))).set_properties(
+             **{'min-width':'85px'},subset = ((list(test_df.index[:-1]),test_df.columns[L:])),overwrite=False).set_properties(**{
+             'color': 'black'},overwrite=False).set_properties(
+             **{'border': '1px black solid !important'},subset = ((list(test_df.index[:]),test_df.columns[L:])))

+         test_df_style = test_df_style.format(
+             '{:.0f}',subset=(test_df.index[:-1],test_df.columns[2:L]))

+         test_df_style = test_df_style.format(
+             '{:.1f}',subset=(test_df.index[:-1],test_df.columns[0]))

+         print('made it to teh end')
+         return test_df_style

+
+     #return exit_velo_df_codes_summ_time_style_set
+
+     # @output
+     # @render.plot(alt="A histogram")
+     # def plot_pitch():
+     # p
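+     # Legend table explaining the off-night highlight and the tired-opponent emoji.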
+     @output
+     @render.table
+     def schedule_result_legend():
+
+         off_b2b_df = pd.DataFrame(data={'off':'Off-Night','b2b':'Tired Opp. &#128564;'},index=[0])
+         #off_b2b_df.style.applymap(highlight_cols,subset = ((list(off_b2b_df.index[:-1]),off_b2b_df.columns[0])))
+         off_b2b_df_style = off_b2b_df.style.set_properties(**{'border': '3 px'},overwrite=False).set_table_styles([{
  'selector': 'caption',
  'props': [
  ('color', ''),
  ('text-align', 'centre'),
  ]

+         },{'selector' :'th', 'props':[('text-align', 'center'),('Height','px'),('color','black'),(
+             'border', '1px black solid !important')]},{'selector' :'td', 'props':[('text-align', 'center'),('font-size', '18px'),('color','black')]}],overwrite=False).set_properties(
+             **{'background-color':'White','index':'White','min-width':'150px'},overwrite=False).set_table_styles(
  [{'selector': 'th:first-child', 'props': [('background-color', 'white')]}],overwrite=False).set_table_styles(
  [{'selector': 'tr:first-child', 'props': [('background-color', 'white')]}],overwrite=False).set_table_styles(
  [{'selector': 'tr', 'props': [('line-height', '20px')]}],overwrite=False).set_properties(
+             **{'Height': '8px'},**{'text-align': 'center'},overwrite=False).set_properties(
+             **{'background-color':'#C2FEE9'},subset=off_b2b_df.columns[0]).set_properties(
+             **{'color':'black'},subset=off_b2b_df.columns[:]).hide_index().set_table_styles([
+             {'selector': 'thead', 'props': [('display', 'none')]}
+             ]).set_properties(**{'border': '3 px','color':'black'},overwrite=False).set_properties(
+             **{'border': '1px black solid !important'},subset = ((list(off_b2b_df.index[:]),off_b2b_df.columns[:]))).set_properties(
+             **{'min-width':'130'},subset = ((list(off_b2b_df.index[:]),off_b2b_df.columns[:])),overwrite=False).set_properties(**{
+             'color': 'black'},overwrite=False).set_properties(
+             **{'border': '1px black solid !important'},subset = ((list(off_b2b_df.index[:]),off_b2b_df.columns[:])))
+
+         return off_b2b_df_style
