Multichem committed
Commit 45d7395 · verified · 1 Parent(s): 7fd175a

Create app.py

Files changed (1)
  1. app.py +358 -0
app.py ADDED
@@ -0,0 +1,358 @@
+ import pulp
+ import numpy as np
+ import pandas as pd
+ import streamlit as st
+ import gspread
+ from itertools import combinations
+
+ scope = ['https://www.googleapis.com/auth/spreadsheets',
+          "https://www.googleapis.com/auth/drive"]
+
+ credentials = {
+ "type": "service_account",
+ "project_id": "sheets-api-connect-378620",
+ "private_key_id": "1005124050c80d085e2c5b344345715978dd9cc9",
+ "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCtKa01beXwc88R\nnPZVQTNPVQuBnbwoOfc66gW3547ja/UEyIGAF112dt/VqHprRafkKGmlg55jqJNt\na4zceLKV+wTm7vBu7lDISTJfGzCf2TrxQYNqwMKE2LOjI69dBM8u4Dcb4k0wcp9v\ntW1ZzLVVuwTvmrg7JBHjiSaB+x5wxm/r3FOiJDXdlAgFlytzqgcyeZMJVKKBQHyJ\njEGg/1720A0numuOCt71w/2G0bDmijuj1e6tH32MwRWcvRNZ19K9ssyDz2S9p68s\nYDhIxX69OWxwScTIHLY6J2t8txf/XMivL/636fPlDADvBEVTdlT606n8CcKUVQeq\npUVdG+lfAgMBAAECggEAP38SUA7B69eTfRpo658ycOs3Amr0JW4H/bb1rNeAul0K\nZhwd/HnU4E07y81xQmey5kN5ZeNrD5EvqkZvSyMJHV0EEahZStwhjCfnDB/cxyix\nZ+kFhv4y9eK+kFpUAhBy5nX6T0O+2T6WvzAwbmbVsZ+X8kJyPuF9m8ldcPlD0sce\ntj8NwVq1ys52eosqs7zi2vjt+eMcaY393l4ls+vNq8Yf27cfyFw45W45CH/97/Nu\n5AmuzlCOAfFF+z4OC5g4rei4E/Qgpxa7/uom+BVfv9G0DIGW/tU6Sne0+37uoGKt\nW6DzhgtebUtoYkG7ZJ05BTXGp2lwgVcNRoPwnKJDxQKBgQDT5wYPUBDW+FHbvZSp\nd1m1UQuXyerqOTA9smFaM8sr/UraeH85DJPEIEk8qsntMBVMhvD3Pw8uIUeFNMYj\naLmZFObsL+WctepXrVo5NB6RtLB/jZYxiKMatMLUJIYtcKIp+2z/YtKiWcLnwotB\nWdCjVnPTxpkurmF2fWP/eewZ+wKBgQDRMtJg7etjvKyjYNQ5fARnCc+XsI3gkBe1\nX9oeXfhyfZFeBXWnZzN1ITgFHplDznmBdxAyYGiQdbbkdKQSghviUQ0igBvoDMYy\n1rWcy+a17Mj98uyNEfmb3X2cC6WpvOZaGHwg9+GY67BThwI3FqHIbyk6Ko09WlTX\nQpRQjMzU7QKBgAfi1iflu+q0LR+3a3vvFCiaToskmZiD7latd9AKk2ocsBd3Woy9\n+hXXecJHPOKV4oUJlJgvAZqe5HGBqEoTEK0wyPNLSQlO/9ypd+0fEnArwFHO7CMF\nycQprAKHJXM1eOOFFuZeQCaInqdPZy1UcV5Szla4UmUZWkk1m24blHzXAoGBAMcA\nyH4qdbxX9AYrC1dvsSRvgcnzytMvX05LU0uF6tzGtG0zVlub4ahvpEHCfNuy44UT\nxRWW/oFFaWjjyFxO5sWggpUqNuHEnRopg3QXx22SRRTGbN45li/+QAocTkgsiRh1\nqEcYZsO4mPCsQqAy6E2p6RcK+Xa+omxvSnVhq0x1AoGAKr8GdkCl4CF6rieLMAQ7\nLNBuuoYGaHoh8l5E2uOQpzwxVy/nMBcAv+2+KqHEzHryUv1owOi6pMLv7A9mTFoS\n18B0QRLuz5fSOsVnmldfC9fpUc6H8cH1SINZpzajqQA74bPwELJjnzrCnH79TnHG\nJuElxA33rFEjbgbzdyrE768=\n-----END PRIVATE KEY-----\n",
+ "client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
+ "client_id": "106625872877651920064",
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+ "token_uri": "https://oauth2.googleapis.com/token",
+ "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+ "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
+ }
+
+ gc = gspread.service_account_from_dict(credentials)
+
+ st.set_page_config(layout="wide")
+
+ game_format = {'Win Percentage': '{:.2%}','First Inning Lead Percentage': '{:.2%}',
+                'Fifth Inning Lead Percentage': '{:.2%}', '8+ runs': '{:.2%}', 'DK LevX': '{:.2%}', 'FD LevX': '{:.2%}'}
+
+ player_roo_format = {'Top_finish': '{:.2%}','Top_5_finish': '{:.2%}', 'Top_10_finish': '{:.2%}', '20+%': '{:.2%}', '2x%': '{:.2%}', '3x%': '{:.2%}',
+                      '4x%': '{:.2%}','GPP%': '{:.2%}'}
+
+ all_dk_player_projections = 'https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348'
+
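+ # Cached loaders: each function below reads one worksheet from the Google Sheet URL above into a pandas DataFrame.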
+ @st.cache_data
+ def set_slate_teams():
+     sh = gc.open_by_url(all_dk_player_projections)
+     worksheet = sh.worksheet('Site_Info')
+     raw_display = pd.DataFrame(worksheet.get_all_records())
+
+     return raw_display
+
+ @st.cache_data
+ def player_stat_table():
+     sh = gc.open_by_url(all_dk_player_projections)
+     worksheet = sh.worksheet('Player_Projections')
+     raw_display = pd.DataFrame(worksheet.get_all_records())
+
+     return raw_display
+
+ @st.cache_data
+ def load_dk_player_projections():
+     sh = gc.open_by_url(all_dk_player_projections)
+     worksheet = sh.worksheet('DK_ROO')
+     load_display = pd.DataFrame(worksheet.get_all_records())
+     load_display.replace('', np.nan, inplace=True)
+     raw_display = load_display.dropna(subset=['Median'])
+
+     return raw_display
+
+ @st.cache_data
+ def load_fd_player_projections():
+     sh = gc.open_by_url(all_dk_player_projections)
+     worksheet = sh.worksheet('FD_ROO')
+     load_display = pd.DataFrame(worksheet.get_all_records())
+     load_display.replace('', np.nan, inplace=True)
+     raw_display = load_display.dropna(subset=['Median'])
+
+     return raw_display
+
+ @st.cache_data
+ def load_dk_stacks():
+     sh = gc.open_by_url(all_dk_player_projections)
+     worksheet = sh.worksheet('DK_Stacks')
+     load_display = pd.DataFrame(worksheet.get_all_records())
+     raw_display = load_display
+
+     return raw_display
+
+ @st.cache_data
+ def load_fd_stacks():
+     sh = gc.open_by_url(all_dk_player_projections)
+     worksheet = sh.worksheet('FD_Stacks')
+     load_display = pd.DataFrame(worksheet.get_all_records())
+     raw_display = load_display
+
+     return raw_display
+
+ @st.cache_data
+ def convert_df_to_csv(df):
+     return df.to_csv().encode('utf-8')
+
+ player_stats = player_stat_table()
+ dk_roo_raw = load_dk_player_projections()
+ fd_roo_raw = load_fd_player_projections()
+ t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
+ site_slates = set_slate_teams()
+ col1, col2 = st.columns([1, 5])
+
+ tab1, tab2 = st.tabs(['Uploads and Info', 'Stack Finder'])
+
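+ # Tab 1 lets the user upload a custom projections file (CSV or Excel) and preview it.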
+ with tab1:
+     st.info("The Projections file can have any columns in any order, but must contain columns explicitly named: 'Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', and 'Own'.")
+     col1, col2 = st.columns([1, 5])
+
+     with col1:
+         proj_file = st.file_uploader("Upload Projections File", key = 'proj_uploader')
+
+         if proj_file is not None:
+             try:
+                 proj_dataframe = pd.read_csv(proj_file)
+             except:
+                 proj_dataframe = pd.read_excel(proj_file)
+     with col2:
+         if proj_file is not None:
+             st.dataframe(proj_dataframe.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width = True)
+
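+ # Tab 2 holds the slate/site/team/stack-size controls and the stack finder itself.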
+ with tab2:
+     col1, col2 = st.columns([1, 5])
+
+     with col1:
+         st.info(t_stamp)
+         if st.button("Load/Reset Data", key='reset1'):
+             st.cache_data.clear()
+             player_stats = player_stat_table()
+             dk_roo_raw = load_dk_player_projections()
+             fd_roo_raw = load_fd_player_projections()
+             t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
+             site_slates = set_slate_teams()
+         slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Thurs-Mon Slate', 'User'), key='slate_var1')
+         site_var1 = st.radio("What site are you playing?", ('Draftkings', 'Fanduel'), key='site_var1')
+
+         if site_var1 == 'Draftkings':
+             if slate_var1 == 'User':
+                 raw_baselines = proj_dataframe
+                 qb_lookup = raw_baselines[raw_baselines['Position'] == 'QB']
+             elif slate_var1 != 'User':
+                 raw_baselines = dk_roo_raw[dk_roo_raw['slate'] == str(slate_var1)]
+                 raw_baselines = raw_baselines[raw_baselines['version'] == 'overall']
+                 qb_lookup = raw_baselines[raw_baselines['Position'] == 'QB']
+         elif site_var1 == 'Fanduel':
+             if slate_var1 == 'User':
+                 raw_baselines = proj_dataframe
+                 qb_lookup = raw_baselines[raw_baselines['Position'] == 'QB']
+             elif slate_var1 != 'User':
+                 raw_baselines = fd_roo_raw[fd_roo_raw['slate'] == str(slate_var1)]
+                 raw_baselines = raw_baselines[raw_baselines['version'] == 'overall']
+                 qb_lookup = raw_baselines[raw_baselines['Position'] == 'QB']
+         split_var1 = st.radio("Would you like to run stack analysis for the full slate or individual teams?", ('Full Slate Run', 'Specific Teams'), key='split_var1')
+         if split_var1 == 'Specific Teams':
+             team_var1 = st.multiselect('Which teams would you like to include in the analysis?', options = raw_baselines['Team'].unique(), key='team_var1')
+         elif split_var1 == 'Full Slate Run':
+             team_var1 = raw_baselines.Team.unique().tolist()
+         pos_split1 = st.radio("Are you viewing all positions, specific groups, or specific positions?", ('All Positions', 'Specific Positions'), key='pos_split1')
+         if pos_split1 == 'Specific Positions':
+             pos_var1 = st.multiselect('What Positions would you like to view?', options = ['QB', 'WR', 'TE'])
+         elif pos_split1 == 'All Positions':
+             pos_var1 = 'All'
+         if site_var1 == 'Draftkings':
+             max_sal1 = st.number_input('Max Salary', min_value = 5000, max_value = 50000, value = 35000, step = 100, key='max_sal1')
+         elif site_var1 == 'Fanduel':
+             max_sal1 = st.number_input('Max Salary', min_value = 5000, max_value = 35000, value = 25000, step = 100, key='max_sal1')
+         size_var1 = st.selectbox('What size of stacks are you analyzing?', options = ['QB+1', 'QB+2'])
+         if size_var1 == 'QB+1':
+             stack_size = 2
+         elif size_var1 == 'QB+2':
+             stack_size = 3
+
+         team_dict = dict(zip(raw_baselines.Player, raw_baselines.Team))
+         proj_dict = dict(zip(raw_baselines.Player, raw_baselines.Median))
+         own_dict = dict(zip(raw_baselines.Player, raw_baselines.Own))
+         cost_dict = dict(zip(raw_baselines.Player, raw_baselines.Salary))
+         qb_dict = dict(zip(qb_lookup.Team, qb_lookup.Player))
+
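+     # The main column: QB-based stacks are built and filtered here once the user clicks 'Run stack analysis'.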
+     with col2:
+         stack_hold_container = st.empty()
+         if st.button('Run stack analysis'):
+             comb_list = []
+             if pos_split1 == 'All Positions':
+                 raw_baselines = raw_baselines
+             elif pos_split1 != 'All Positions':
+                 raw_baselines = raw_baselines[raw_baselines['Position'].str.contains('|'.join(pos_var1))]
+
+             for cur_team in team_var1:
+                 working_baselines = raw_baselines
+                 working_baselines = working_baselines[working_baselines['Team'] == cur_team]
+                 working_baselines = working_baselines[working_baselines['Position'] != 'RB']
+                 working_baselines = working_baselines[working_baselines['Position'] != 'DST']
+                 qb_var = qb_dict[cur_team]
+                 order_list = working_baselines['Player']
+
+                 comb = combinations(order_list, stack_size)
+
+                 for i in list(comb):
+                     if qb_var in i:
+                         comb_list.append(i)
+
+             comb_DF = pd.DataFrame(comb_list)
+
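+             # Sum projections, salaries, and ownership across the players in each stack.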
+             if stack_size == 2:
+                 comb_DF['Team'] = comb_DF[0].map(team_dict)
+
+                 comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
+                                        comb_DF[1].map(proj_dict)])
+
+                 comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
+                                          comb_DF[1].map(cost_dict)])
+
+                 comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
+                                        comb_DF[1].map(own_dict)])
+             elif stack_size == 3:
+                 comb_DF['Team'] = comb_DF[0].map(team_dict)
+
+                 comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
+                                        comb_DF[1].map(proj_dict),
+                                        comb_DF[2].map(proj_dict)])
+
+                 comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
+                                          comb_DF[1].map(cost_dict),
+                                          comb_DF[2].map(cost_dict)])
+
+                 comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
+                                        comb_DF[1].map(own_dict),
+                                        comb_DF[2].map(own_dict)])
+             elif stack_size == 4:
+                 comb_DF['Team'] = comb_DF[0].map(team_dict)
+
+                 comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
+                                        comb_DF[1].map(proj_dict),
+                                        comb_DF[2].map(proj_dict),
+                                        comb_DF[3].map(proj_dict)])
+
+                 comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
+                                          comb_DF[1].map(cost_dict),
+                                          comb_DF[2].map(cost_dict),
+                                          comb_DF[3].map(cost_dict)])
+
+                 comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
+                                        comb_DF[1].map(own_dict),
+                                        comb_DF[2].map(own_dict),
+                                        comb_DF[3].map(own_dict)])
+             elif stack_size == 5:
+                 comb_DF['Team'] = comb_DF[0].map(team_dict)
+
+                 comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
+                                        comb_DF[1].map(proj_dict),
+                                        comb_DF[2].map(proj_dict),
+                                        comb_DF[3].map(proj_dict),
+                                        comb_DF[4].map(proj_dict)])
+
+                 comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
+                                          comb_DF[1].map(cost_dict),
+                                          comb_DF[2].map(cost_dict),
+                                          comb_DF[3].map(cost_dict),
+                                          comb_DF[4].map(cost_dict)])
+
+                 comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
+                                        comb_DF[1].map(own_dict),
+                                        comb_DF[2].map(own_dict),
+                                        comb_DF[3].map(own_dict),
+                                        comb_DF[4].map(own_dict)])
+
+             comb_DF = comb_DF.sort_values(by='Proj', ascending=False)
+             comb_DF = comb_DF.loc[comb_DF['Salary'] <= max_sal1]
+
+             cut_var = 0
+
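+             # Walk down the projection-sorted table and drop any stack whose combined ownership
+             # is higher than that of a better-projected stack already kept (a projection-vs-ownership frontier).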
+             if stack_size == 2:
+                 while cut_var <= int(len(comb_DF)):
+                     try:
+                         if int(cut_var) == 0:
+                             cur_proj = float(comb_DF.iat[cut_var, 3])
+                             cur_own = float(comb_DF.iat[cut_var, 5])
+                         elif int(cut_var) >= 1:
+                             check_own = float(comb_DF.iat[cut_var, 5])
+                             if check_own > cur_own:
+                                 comb_DF = comb_DF.drop([cut_var])
+                                 cur_own = cur_own
+                                 cut_var = cut_var - 1
+                                 comb_DF = comb_DF.reset_index()
+                                 comb_DF = comb_DF.drop(['index'], axis=1)
+                             elif check_own <= cur_own:
+                                 cur_own = float(comb_DF.iat[cut_var, 5])
+                                 cut_var = cut_var
+                         cut_var += 1
+                     except:
+                         cut_var += 1
+             elif stack_size == 3:
+                 while cut_var <= int(len(comb_DF)):
+                     try:
+                         if int(cut_var) == 0:
+                             cur_proj = float(comb_DF.iat[cut_var,4])
+                             cur_own = float(comb_DF.iat[cut_var,6])
+                         elif int(cut_var) >= 1:
+                             check_own = float(comb_DF.iat[cut_var,6])
+                             if check_own > cur_own:
+                                 comb_DF = comb_DF.drop([cut_var])
+                                 cur_own = cur_own
+                                 cut_var = cut_var - 1
+                                 comb_DF = comb_DF.reset_index()
+                                 comb_DF = comb_DF.drop(['index'], axis=1)
+                             elif check_own <= cur_own:
+                                 cur_own = float(comb_DF.iat[cut_var,6])
+                                 cut_var = cut_var
+                         cut_var += 1
+                     except:
+                         cut_var += 1
+             elif stack_size == 4:
+                 while cut_var <= int(len(comb_DF)):
+                     try:
+                         if int(cut_var) == 0:
+                             cur_proj = float(comb_DF.iat[cut_var,5])
+                             cur_own = float(comb_DF.iat[cut_var,7])
+                         elif int(cut_var) >= 1:
+                             check_own = float(comb_DF.iat[cut_var,7])
+                             if check_own > cur_own:
+                                 comb_DF = comb_DF.drop([cut_var])
+                                 cur_own = cur_own
+                                 cut_var = cut_var - 1
+                                 comb_DF = comb_DF.reset_index()
+                                 comb_DF = comb_DF.drop(['index'], axis=1)
+                             elif check_own <= cur_own:
+                                 cur_own = float(comb_DF.iat[cut_var,7])
+                                 cut_var = cut_var
+                         cut_var += 1
+                     except:
+                         cut_var += 1
+             elif stack_size == 5:
+                 while cut_var <= int(len(comb_DF)):
+                     try:
+                         if int(cut_var) == 0:
+                             cur_proj = float(comb_DF.iat[cut_var,6])
+                             cur_own = float(comb_DF.iat[cut_var,8])
+                         elif int(cut_var) >= 1:
+                             check_own = float(comb_DF.iat[cut_var,8])
+                             if check_own > cur_own:
+                                 comb_DF = comb_DF.drop([cut_var])
+                                 cur_own = cur_own
+                                 cut_var = cut_var - 1
+                                 comb_DF = comb_DF.reset_index()
+                                 comb_DF = comb_DF.drop(['index'], axis=1)
+                             elif check_own <= cur_own:
+                                 cur_own = float(comb_DF.iat[cut_var,8])
+                                 cut_var = cut_var
+                         cut_var += 1
+                     except:
+                         cut_var += 1
+
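+             # Show the surviving stacks and offer a CSV download.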
+             with stack_hold_container:
+                 stack_hold_container = st.empty()
+                 st.dataframe(comb_DF.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width = True)
+                 st.download_button(
+                     label="Export Tables",
+                     data=convert_df_to_csv(comb_DF),
+                     file_name='NFL_Stack_Options_export.csv',
+                     mime='text/csv',
+                 )