Multichem committed on
Commit fe28b9b · 1 Parent(s): 01ae0a1

Delete streamlit_app.py

Files changed (1)
  1. streamlit_app.py +0 -1398
streamlit_app.py DELETED
@@ -1,1398 +0,0 @@
1
- import streamlit as st
2
- st.set_page_config(layout="wide")
3
-
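- # Reset the module namespace (delete all non-underscore globals) so a fresh Streamlit rerun starts clean before re-importing dependencies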
4
- for name in dir():
5
- if not name.startswith('_'):
6
- del globals()[name]
7
-
8
- import pulp
9
- import numpy as np
10
- import pandas as pd
11
- import streamlit as st
12
- import gspread
13
- import time
14
- import random
15
- import scipy.stats
16
- import os
17
-
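- # Build a gspread client from the embedded service-account credentials; cached once per session as a shared resource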
18
- @st.cache_resource
19
- def init_conn():
20
- scope = ['https://www.googleapis.com/auth/spreadsheets',
21
- "https://www.googleapis.com/auth/drive"]
22
-
23
- credentials = {
24
- "type": "service_account",
25
- "project_id": "sheets-api-connect-378620",
26
- "private_key_id": "1005124050c80d085e2c5b344345715978dd9cc9",
27
- "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCtKa01beXwc88R\nnPZVQTNPVQuBnbwoOfc66gW3547ja/UEyIGAF112dt/VqHprRafkKGmlg55jqJNt\na4zceLKV+wTm7vBu7lDISTJfGzCf2TrxQYNqwMKE2LOjI69dBM8u4Dcb4k0wcp9v\ntW1ZzLVVuwTvmrg7JBHjiSaB+x5wxm/r3FOiJDXdlAgFlytzqgcyeZMJVKKBQHyJ\njEGg/1720A0numuOCt71w/2G0bDmijuj1e6tH32MwRWcvRNZ19K9ssyDz2S9p68s\nYDhIxX69OWxwScTIHLY6J2t8txf/XMivL/636fPlDADvBEVTdlT606n8CcKUVQeq\npUVdG+lfAgMBAAECggEAP38SUA7B69eTfRpo658ycOs3Amr0JW4H/bb1rNeAul0K\nZhwd/HnU4E07y81xQmey5kN5ZeNrD5EvqkZvSyMJHV0EEahZStwhjCfnDB/cxyix\nZ+kFhv4y9eK+kFpUAhBy5nX6T0O+2T6WvzAwbmbVsZ+X8kJyPuF9m8ldcPlD0sce\ntj8NwVq1ys52eosqs7zi2vjt+eMcaY393l4ls+vNq8Yf27cfyFw45W45CH/97/Nu\n5AmuzlCOAfFF+z4OC5g4rei4E/Qgpxa7/uom+BVfv9G0DIGW/tU6Sne0+37uoGKt\nW6DzhgtebUtoYkG7ZJ05BTXGp2lwgVcNRoPwnKJDxQKBgQDT5wYPUBDW+FHbvZSp\nd1m1UQuXyerqOTA9smFaM8sr/UraeH85DJPEIEk8qsntMBVMhvD3Pw8uIUeFNMYj\naLmZFObsL+WctepXrVo5NB6RtLB/jZYxiKMatMLUJIYtcKIp+2z/YtKiWcLnwotB\nWdCjVnPTxpkurmF2fWP/eewZ+wKBgQDRMtJg7etjvKyjYNQ5fARnCc+XsI3gkBe1\nX9oeXfhyfZFeBXWnZzN1ITgFHplDznmBdxAyYGiQdbbkdKQSghviUQ0igBvoDMYy\n1rWcy+a17Mj98uyNEfmb3X2cC6WpvOZaGHwg9+GY67BThwI3FqHIbyk6Ko09WlTX\nQpRQjMzU7QKBgAfi1iflu+q0LR+3a3vvFCiaToskmZiD7latd9AKk2ocsBd3Woy9\n+hXXecJHPOKV4oUJlJgvAZqe5HGBqEoTEK0wyPNLSQlO/9ypd+0fEnArwFHO7CMF\nycQprAKHJXM1eOOFFuZeQCaInqdPZy1UcV5Szla4UmUZWkk1m24blHzXAoGBAMcA\nyH4qdbxX9AYrC1dvsSRvgcnzytMvX05LU0uF6tzGtG0zVlub4ahvpEHCfNuy44UT\nxRWW/oFFaWjjyFxO5sWggpUqNuHEnRopg3QXx22SRRTGbN45li/+QAocTkgsiRh1\nqEcYZsO4mPCsQqAy6E2p6RcK+Xa+omxvSnVhq0x1AoGAKr8GdkCl4CF6rieLMAQ7\nLNBuuoYGaHoh8l5E2uOQpzwxVy/nMBcAv+2+KqHEzHryUv1owOi6pMLv7A9mTFoS\n18B0QRLuz5fSOsVnmldfC9fpUc6H8cH1SINZpzajqQA74bPwELJjnzrCnH79TnHG\nJuElxA33rFEjbgbzdyrE768=\n-----END PRIVATE KEY-----\n",
28
- "client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
29
- "client_id": "106625872877651920064",
30
- "auth_uri": "https://accounts.google.com/o/oauth2/auth",
31
- "token_uri": "https://oauth2.googleapis.com/token",
32
- "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
33
- "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
34
- }
35
-
36
- gc = gspread.service_account_from_dict(credentials)
37
- return gc
38
-
39
- gc = init_conn()
40
-
41
- game_format = {'Win Percentage': '{:.2%}','First Inning Lead Percentage': '{:.2%}',
42
- 'Fifth Inning Lead Percentage': '{:.2%}', '8+ runs': '{:.2%}', 'DK LevX': '{:.2%}', 'FD LevX': '{:.2%}'}
43
-
44
- player_roo_format = {'Top_finish': '{:.2%}','Top_5_finish': '{:.2%}', 'Top_10_finish': '{:.2%}', '20+%': '{:.2%}', '2x%': '{:.2%}', '3x%': '{:.2%}',
45
- '4x%': '{:.2%}','GPP%': '{:.2%}'}
46
-
47
- freq_format = {'Proj Own': '{:.2%}', 'Exposure': '{:.2%}', 'Edge': '{:.2%}'}
48
-
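- # Google Sheet loaders: slate info and export IDs refresh hourly, projection tables every 10 minutes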
49
- @st.cache_resource(ttl = 3600)
50
- def set_slate_teams():
51
- sh = gc.open_by_url('https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348')
52
- worksheet = sh.worksheet('Site_Info')
53
- raw_display = pd.DataFrame(worksheet.get_all_records())
54
-
55
- return raw_display
56
-
57
- @st.cache_resource(ttl = 600)
58
- def player_stat_table():
59
- sh = gc.open_by_url('https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348')
60
- worksheet = sh.worksheet('Player_Projections')
61
- raw_display = pd.DataFrame(worksheet.get_all_records())
62
-
63
- return raw_display
64
-
65
- @st.cache_resource(ttl = 600)
66
- def load_dk_player_projections():
67
- sh = gc.open_by_url('https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348')
68
- worksheet = sh.worksheet('DK_ROO')
69
- load_display = pd.DataFrame(worksheet.get_all_records())
70
- load_display.replace('', np.nan, inplace=True)
71
- raw_display = load_display.dropna(subset=['Median'])
72
- del load_display
73
-
74
- return raw_display
75
-
76
- @st.cache_resource(ttl = 600)
77
- def load_fd_player_projections():
78
- sh = gc.open_by_url('https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348')
79
- worksheet = sh.worksheet('FD_ROO')
80
- load_display = pd.DataFrame(worksheet.get_all_records())
81
- load_display.replace('', np.nan, inplace=True)
82
- raw_display = load_display.dropna(subset=['Median'])
83
- del load_display
84
-
85
- return raw_display
86
-
87
- @st.cache_resource(ttl = 3600)
88
- def set_export_ids():
89
- sh = gc.open_by_url('https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348')
90
- worksheet = sh.worksheet('DK_ROO')
91
- load_display = pd.DataFrame(worksheet.get_all_records())
92
- load_display.replace('', np.nan, inplace=True)
93
- raw_display = load_display.dropna(subset=['Median'])
94
- dk_ids = dict(zip(raw_display['Player'], raw_display['player_id']))
95
-
96
- worksheet = sh.worksheet('FD_ROO')
97
- load_display = pd.DataFrame(worksheet.get_all_records())
98
- load_display.replace('', np.nan, inplace=True)
99
- raw_display = load_display.dropna(subset=['Median'])
100
- fd_ids = dict(zip(raw_display['Player'], raw_display['player_id']))
101
-
102
- del load_display
103
- del raw_display
104
-
105
- return dk_ids, fd_ids
106
-
107
- @st.cache_data
108
- def convert_df_to_csv(df):
109
- return df.to_csv().encode('utf-8')
110
-
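- # Build the simulated field in passes: early passes use the base field strength, later passes raise it, and each pass adds both correlated and uncorrelated lineups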
111
- def run_seed_frame(seed_depth1, Strength_var, strength_grow, Teams_used, Total_Runs):
112
- RunsVar = 1
113
- seed_depth_def = seed_depth1
114
- Strength_var_def = Strength_var
115
- strength_grow_def = strength_grow
116
- Teams_used_def = Teams_used
117
- Total_Runs_def = Total_Runs
118
- while RunsVar <= seed_depth_def:
119
- if RunsVar <= 3:
120
- FieldStrength = Strength_var_def
121
- RandomPortfolio, maps_dict = get_correlated_portfolio_for_sim(Total_Runs_def * .1)
122
- FinalPortfolio = RandomPortfolio
123
- FinalPortfolio2, maps_dict2 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .1)
124
- FinalPortfolio = pd.concat([FinalPortfolio, FinalPortfolio2], axis=0)
125
- maps_dict.update(maps_dict2)
126
- del FinalPortfolio2
127
- del maps_dict2
128
- elif RunsVar > 3 and RunsVar <= 4:
129
- FieldStrength += (strength_grow_def + ((30 - len(Teams_used_def)) * .001))
130
- FinalPortfolio3, maps_dict3 = get_correlated_portfolio_for_sim(Total_Runs_def * .1)
131
- FinalPortfolio4, maps_dict4 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .1)
132
- FinalPortfolio = pd.concat([FinalPortfolio, FinalPortfolio3], axis=0)
133
- FinalPortfolio = pd.concat([FinalPortfolio, FinalPortfolio4], axis=0)
134
- FinalPortfolio = FinalPortfolio.drop_duplicates(subset = ['Projection', 'Own'],keep = 'last').reset_index(drop = True)
135
- maps_dict.update(maps_dict3)
136
- maps_dict.update(maps_dict4)
137
- del FinalPortfolio3
138
- del maps_dict3
139
- del FinalPortfolio4
140
- del maps_dict4
141
- elif RunsVar > 4:
142
- FieldStrength = 1
143
- FinalPortfolio3, maps_dict3 = get_correlated_portfolio_for_sim(Total_Runs_def * .1)
144
- FinalPortfolio4, maps_dict4 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .1)
145
- FinalPortfolio = pd.concat([FinalPortfolio, FinalPortfolio3], axis=0)
146
- FinalPortfolio = pd.concat([FinalPortfolio, FinalPortfolio4], axis=0)
147
- FinalPortfolio = FinalPortfolio.drop_duplicates(subset = ['Projection', 'Own'],keep = 'last').reset_index(drop = True)
148
- maps_dict.update(maps_dict3)
149
- maps_dict.update(maps_dict4)
150
- del FinalPortfolio3
151
- del maps_dict3
152
- del FinalPortfolio4
153
- del maps_dict4
154
- RunsVar += 1
155
-
156
- return FinalPortfolio, maps_dict
157
-
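- # For each team, pair the QB with that team's Nth-ranked WR (by Median) to use as a stack partner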
158
- def create_stack_options(player_data, wr_var):
159
- merged_frame = pd.DataFrame(columns = ['QB', 'Player'])
160
- data_raw = player_data.sort_values(by='Median', ascending=False)
161
-
162
- for team in data_raw['Team'].unique():
163
- data_split = data_raw.loc[data_raw['Team'] == team]
164
- qb_frame = data_split.loc[data_split['Position'] == 'QB'].reset_index()
165
- wr_frame = data_split.loc[data_split['Position'] == 'WR'].iloc[wr_var-1:wr_var]
166
- wr_frame['QB'] = qb_frame['Player'][0]
167
- merge_slice = wr_frame[['QB', 'Player']]
168
- merged_frame = pd.concat([merged_frame, merge_slice])
169
- merged_frame = merged_frame.reset_index()
170
- correl_dict = dict(zip(merged_frame.QB, merged_frame.Player))
171
-
172
- del merged_frame
173
- del data_raw
174
-
175
- return correl_dict
176
-
177
- def create_overall_dfs(pos_players, table_name, dict_name, pos):
178
- if pos == "FLEX":
179
- pos_players = pos_players.sort_values(by='Value', ascending=False)
180
- table_name_raw = pos_players.reset_index(drop=True)
181
- overall_table_name = table_name_raw.head(round(len(table_name_raw)))
182
- overall_table_name = overall_table_name.assign(Var = range(0,len(overall_table_name)))
183
- overall_dict_name = pd.Series(overall_table_name.Player.values, index=overall_table_name.Var).to_dict()
184
-
185
- del pos_players
186
- del table_name_raw
187
- elif pos != "FLEX":
188
- table_name_raw = pos_players[pos_players['Position'].str.contains(pos)].reset_index(drop=True)
189
- overall_table_name = table_name_raw.head(round(len(table_name_raw)))
190
- overall_table_name = overall_table_name.assign(Var = range(0,len(overall_table_name)))
191
- overall_dict_name = pd.Series(overall_table_name.Player.values, index=overall_table_name.Var).to_dict()
192
-
193
- del pos_players
194
- del table_name_raw
195
-
196
- return overall_table_name, overall_dict_name
197
-
198
-
199
- def get_overall_merged_df():
200
- ref_dict = {
201
- 'pos':['RB', 'WR', 'TE', 'FLEX'],
202
- 'pos_dfs':['RB_Table', 'WR_Table', 'TE_Table', 'FLEX_Table'],
203
- 'pos_dicts':['rb_dict', 'wr_dict', 'te_dict', 'flex_dict']
204
- }
205
-
206
- for i in range(0,4):
207
- ref_dict['pos_dfs'][i], ref_dict['pos_dicts'][i] =\
208
- create_overall_dfs(pos_players, ref_dict['pos_dfs'][i], ref_dict['pos_dicts'][i], ref_dict['pos'][i])
209
-
210
- df_out = pd.concat(ref_dict['pos_dfs'], ignore_index=True)
211
-
212
- return df_out, ref_dict
213
-
214
- def calculate_range_var(count, min_val, FieldStrength, field_growth):
215
- var = round(len(count[0]) * FieldStrength)
216
- var = max(var, min_val)
217
- var += round(field_growth)
218
- return min(var, len(count[0]))
219
-
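- # Sample random lineups by drawing index positions for each roster slot, with pool sizes scaled by field strength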
220
- def create_random_portfolio(Total_Sample_Size, raw_baselines):
221
-
222
- O_merge, full_pos_player_dict = get_overall_merged_df()
223
- max_var = len(raw_baselines[raw_baselines['Position'] == 'QB'])
224
-
225
- field_growth_rounded = round(field_growth)
226
- ranges_dict = {}
227
-
228
- # Calculate ranges
229
- for df, dict_val, min_val, key in zip(full_pos_player_dict['pos_dfs'], full_pos_player_dict['pos_dicts'], [10, 20, 30, 10], ['RB', 'WR', 'TE', 'FLEX']):
230
- count = create_overall_dfs(pos_players, df, dict_val, key)
231
- ranges_dict[f"{key.lower()}_range"] = calculate_range_var(count, min_val, FieldStrength, field_growth_rounded)
232
-
233
- ranges_dict['qb_range'] = round(max_var / 2)
234
- ranges_dict['dst_range'] = round(max_var / 2)
235
- # Generate unique ranges
236
- # for key, value in ranges_dict.items():
237
- # ranges_dict[f"{key}_Uniques"] = list(range(0, value, 1))
238
-
239
- # Generate random portfolios
240
- rng = np.random.default_rng()
241
- total_elements = [1, 2, 3, 1, 1, 1]
242
- keys = ['qb', 'rb', 'wr', 'te', 'flex', 'dst']
243
-
244
- all_choices = [rng.choice(ranges_dict[f"{key}_range"], size=(Total_Sample_Size, elem)) for key, elem in zip(keys, total_elements)]
245
- RandomPortfolio = pd.DataFrame(np.hstack(all_choices), columns=['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST'])
246
- RandomPortfolio['User/Field'] = 0
247
-
248
- del O_merge
249
-
250
- return RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict
251
-
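- # Correlated field lineups: WR1 is forced to the sampled QB's stack partner before salaries, projections, and ownership are attached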
252
- def get_correlated_portfolio_for_sim(Total_Sample_Size):
253
-
254
- sizesplit = round(Total_Sample_Size * sharp_split)
255
-
256
- RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines)
257
- stack_num = random.randint(1, 3)
258
- stacking_dict = create_stack_options(raw_baselines, stack_num)
259
-
260
- # # Create a dictionary for mapping positions to their corresponding dictionaries
261
- # dict_map = {
262
- # 'QB': qb_dict,
263
- # 'RB1': full_pos_player_dict['pos_dicts'][0],
264
- # 'RB2': full_pos_player_dict['pos_dicts'][0],
265
- # 'WR1': full_pos_player_dict['pos_dicts'][1],
266
- # 'WR2': full_pos_player_dict['pos_dicts'][1],
267
- # 'WR3': full_pos_player_dict['pos_dicts'][1],
268
- # 'TE': full_pos_player_dict['pos_dicts'][2],
269
- # 'FLEX': full_pos_player_dict['pos_dicts'][3],
270
- # 'DST': def_dict
271
- # }
272
-
273
- # # Apply mapping for each position
274
- # for pos, mapping in dict_map.items():
275
- # RandomPortfolio[pos] = RandomPortfolio[pos].map(mapping).astype("string[pyarrow]")
276
-
277
- # # This part appears to be for filtering. Consider if it can be optimized depending on the data characteristics
278
- # RandomPortfolio['plyr_list'] = RandomPortfolio.values.tolist()
279
- # RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
280
- # RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 10].drop(columns=['plyr_list','plyr_count']).reset_index(drop=True)
281
-
282
- RandomPortfolio['QB'] = pd.Series(list(RandomPortfolio['QB'].map(qb_dict)), dtype="string[pyarrow]")
283
- RandomPortfolio['RB1'] = pd.Series(list(RandomPortfolio['RB1'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
284
- RandomPortfolio['RB2'] = pd.Series(list(RandomPortfolio['RB2'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
285
- RandomPortfolio['WR1'] = pd.Series(list(RandomPortfolio['QB'].map(stacking_dict)), dtype="string[pyarrow]")
286
- RandomPortfolio['WR2'] = pd.Series(list(RandomPortfolio['WR2'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
287
- RandomPortfolio['WR3'] = pd.Series(list(RandomPortfolio['WR3'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
288
- RandomPortfolio['TE'] = pd.Series(list(RandomPortfolio['TE'].map(full_pos_player_dict['pos_dicts'][2])), dtype="string[pyarrow]")
289
- RandomPortfolio['FLEX'] = pd.Series(list(RandomPortfolio['FLEX'].map(full_pos_player_dict['pos_dicts'][3])), dtype="string[pyarrow]")
290
- RandomPortfolio['DST'] = pd.Series(list(RandomPortfolio['DST'].map(def_dict)), dtype="string[pyarrow]")
291
- RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
292
- RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
293
- RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 10].drop(columns=['plyr_list','plyr_count']).\
294
- reset_index(drop=True)
295
-
296
- del sizesplit
297
- del full_pos_player_dict
298
- del ranges_dict
299
- del stack_num
300
- del stacking_dict
301
-
302
-
303
-
304
- RandomPortfolio['QBs'] = RandomPortfolio['QB'].map(maps_dict['Salary_map']).astype(np.int32)
305
- RandomPortfolio['RB1s'] = RandomPortfolio['RB1'].map(maps_dict['Salary_map']).astype(np.int32)
306
- RandomPortfolio['RB2s'] = RandomPortfolio['RB2'].map(maps_dict['Salary_map']).astype(np.int32)
307
- RandomPortfolio['WR1s'] = RandomPortfolio['WR1'].map(maps_dict['Salary_map']).astype(np.int32)
308
- RandomPortfolio['WR2s'] = RandomPortfolio['WR2'].map(maps_dict['Salary_map']).astype(np.int32)
309
- RandomPortfolio['WR3s'] = RandomPortfolio['WR3'].map(maps_dict['Salary_map']).astype(np.int32)
310
- RandomPortfolio['TEs'] = RandomPortfolio['TE'].map(maps_dict['Salary_map']).astype(np.int32)
311
- RandomPortfolio['FLEXs'] = RandomPortfolio['FLEX'].map(maps_dict['Salary_map']).astype(np.int32)
312
- RandomPortfolio['DSTs'] = RandomPortfolio['DST'].map(maps_dict['Salary_map']).astype(np.int32)
313
-
314
- RandomPortfolio['QBp'] = RandomPortfolio['QB'].map(maps_dict['Projection_map']).astype(np.float16)
315
- RandomPortfolio['RB1p'] = RandomPortfolio['RB1'].map(maps_dict['Projection_map']).astype(np.float16)
316
- RandomPortfolio['RB2p'] = RandomPortfolio['RB2'].map(maps_dict['Projection_map']).astype(np.float16)
317
- RandomPortfolio['WR1p'] = RandomPortfolio['WR1'].map(maps_dict['Projection_map']).astype(np.float16)
318
- RandomPortfolio['WR2p'] = RandomPortfolio['WR2'].map(maps_dict['Projection_map']).astype(np.float16)
319
- RandomPortfolio['WR3p'] = RandomPortfolio['WR3'].map(maps_dict['Projection_map']).astype(np.float16)
320
- RandomPortfolio['TEp'] = RandomPortfolio['TE'].map(maps_dict['Projection_map']).astype(np.float16)
321
- RandomPortfolio['FLEXp'] = RandomPortfolio['FLEX'].map(maps_dict['Projection_map']).astype(np.float16)
322
- RandomPortfolio['DSTp'] = RandomPortfolio['DST'].map(maps_dict['Projection_map']).astype(np.float16)
323
-
324
- RandomPortfolio['QBo'] = RandomPortfolio['QB'].map(maps_dict['Own_map']).astype(np.float16)
325
- RandomPortfolio['RB1o'] = RandomPortfolio['RB1'].map(maps_dict['Own_map']).astype(np.float16)
326
- RandomPortfolio['RB2o'] = RandomPortfolio['RB2'].map(maps_dict['Own_map']).astype(np.float16)
327
- RandomPortfolio['WR1o'] = RandomPortfolio['WR1'].map(maps_dict['Own_map']).astype(np.float16)
328
- RandomPortfolio['WR2o'] = RandomPortfolio['WR2'].map(maps_dict['Own_map']).astype(np.float16)
329
- RandomPortfolio['WR3o'] = RandomPortfolio['WR3'].map(maps_dict['Own_map']).astype(np.float16)
330
- RandomPortfolio['TEo'] = RandomPortfolio['TE'].map(maps_dict['Own_map']).astype(np.float16)
331
- RandomPortfolio['FLEXo'] = RandomPortfolio['FLEX'].map(maps_dict['Own_map']).astype(np.float16)
332
- RandomPortfolio['DSTo'] = RandomPortfolio['DST'].map(maps_dict['Own_map']).astype(np.float16)
333
-
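- # Row-wise sums (einsum) produce lineup Salary, Projection, and Own totals; the per-slot numeric columns are then dropped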
334
- RandomPortArray = RandomPortfolio.to_numpy()
335
- del RandomPortfolio
336
-
337
- RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,10:19].astype(int))]
338
- RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,19:28].astype(np.double))]
339
- RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,28:37].astype(np.double))]
340
-
341
- RandomPortArrayOut = np.delete(RandomPortArray, np.s_[10:37], axis=1)
342
- RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut, columns = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own'])
343
- RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
344
- del RandomPortArray
345
- del RandomPortArrayOut
346
-
347
- if insert_port == 1:
348
- CleanPortfolio['Salary'] = sum([CleanPortfolio['QB'].map(maps_dict['Salary_map']),
349
- CleanPortfolio['RB1'].map(maps_dict['Salary_map']),
350
- CleanPortfolio['RB2'].map(maps_dict['Salary_map']),
351
- CleanPortfolio['WR1'].map(maps_dict['Salary_map']),
352
- CleanPortfolio['WR2'].map(maps_dict['Salary_map']),
353
- CleanPortfolio['WR3'].map(maps_dict['Salary_map']),
354
- CleanPortfolio['TE'].map(maps_dict['Salary_map']),
355
- CleanPortfolio['FLEX'].map(maps_dict['Salary_map']),
356
- CleanPortfolio['DST'].map(maps_dict['Salary_map'])
357
- ]).astype(np.int32)
358
- if insert_port == 1:
359
- CleanPortfolio['Projection'] = sum([CleanPortfolio['QB'].map(up_dict['Projection_map']),
360
- CleanPortfolio['RB1'].map(up_dict['Projection_map']),
361
- CleanPortfolio['RB2'].map(up_dict['Projection_map']),
362
- CleanPortfolio['WR1'].map(up_dict['Projection_map']),
363
- CleanPortfolio['WR2'].map(up_dict['Projection_map']),
364
- CleanPortfolio['WR3'].map(up_dict['Projection_map']),
365
- CleanPortfolio['TE'].map(up_dict['Projection_map']),
366
- CleanPortfolio['FLEX'].map(up_dict['Projection_map']),
367
- CleanPortfolio['DST'].map(up_dict['Projection_map'])
368
- ]).astype(np.float16)
369
- if insert_port == 1:
370
- CleanPortfolio['Own'] = sum([CleanPortfolio['QB'].map(maps_dict['Own_map']),
371
- CleanPortfolio['RB1'].map(maps_dict['Own_map']),
372
- CleanPortfolio['RB2'].map(maps_dict['Own_map']),
373
- CleanPortfolio['WR1'].map(maps_dict['Own_map']),
374
- CleanPortfolio['WR2'].map(maps_dict['Own_map']),
375
- CleanPortfolio['WR3'].map(maps_dict['Own_map']),
376
- CleanPortfolio['TE'].map(maps_dict['Own_map']),
377
- CleanPortfolio['FLEX'].map(maps_dict['Own_map']),
378
- CleanPortfolio['DST'].map(maps_dict['Own_map'])
379
- ]).astype(np.float16)
380
-
381
- if site_var1 == 'Draftkings':
382
- RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
383
- RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= 49500 - (FieldStrength * 1000)].reset_index(drop=True)
384
- elif site_var1 == 'Fanduel':
385
- RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
386
- RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= 59500 - (FieldStrength * 1000)].reset_index(drop=True)
387
-
388
- RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
389
-
390
- RandomPortfolio = RandomPortfolio[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own']]
391
-
392
- return RandomPortfolio, maps_dict
393
-
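- # Uncorrelated field lineups: identical sampling and scoring, but without the forced QB/WR stack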
394
- def get_uncorrelated_portfolio_for_sim(Total_Sample_Size):
395
-
396
- sizesplit = round(Total_Sample_Size * (1-sharp_split))
397
-
398
- RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines)
399
-
400
- RandomPortfolio['QB'] = pd.Series(list(RandomPortfolio['QB'].map(qb_dict)), dtype="string[pyarrow]")
401
- RandomPortfolio['RB1'] = pd.Series(list(RandomPortfolio['RB1'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
402
- RandomPortfolio['RB2'] = pd.Series(list(RandomPortfolio['RB2'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
403
- RandomPortfolio['WR1'] = pd.Series(list(RandomPortfolio['WR1'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
404
- RandomPortfolio['WR2'] = pd.Series(list(RandomPortfolio['WR2'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
405
- RandomPortfolio['WR3'] = pd.Series(list(RandomPortfolio['WR3'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
406
- RandomPortfolio['TE'] = pd.Series(list(RandomPortfolio['TE'].map(full_pos_player_dict['pos_dicts'][2])), dtype="string[pyarrow]")
407
- RandomPortfolio['FLEX'] = pd.Series(list(RandomPortfolio['FLEX'].map(full_pos_player_dict['pos_dicts'][3])), dtype="string[pyarrow]")
408
- RandomPortfolio['DST'] = pd.Series(list(RandomPortfolio['DST'].map(def_dict)), dtype="string[pyarrow]")
409
- RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
410
- RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
411
- RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 10].drop(columns=['plyr_list','plyr_count']).\
412
- reset_index(drop=True)
413
-
414
- del sizesplit
415
- del full_pos_player_dict
416
- del ranges_dict
417
-
418
- RandomPortfolio['QBs'] = RandomPortfolio['QB'].map(maps_dict['Salary_map']).astype(np.int32)
419
- RandomPortfolio['RB1s'] = RandomPortfolio['RB1'].map(maps_dict['Salary_map']).astype(np.int32)
420
- RandomPortfolio['RB2s'] = RandomPortfolio['RB2'].map(maps_dict['Salary_map']).astype(np.int32)
421
- RandomPortfolio['WR1s'] = RandomPortfolio['WR1'].map(maps_dict['Salary_map']).astype(np.int32)
422
- RandomPortfolio['WR2s'] = RandomPortfolio['WR2'].map(maps_dict['Salary_map']).astype(np.int32)
423
- RandomPortfolio['WR3s'] = RandomPortfolio['WR3'].map(maps_dict['Salary_map']).astype(np.int32)
424
- RandomPortfolio['TEs'] = RandomPortfolio['TE'].map(maps_dict['Salary_map']).astype(np.int32)
425
- RandomPortfolio['FLEXs'] = RandomPortfolio['FLEX'].map(maps_dict['Salary_map']).astype(np.int32)
426
- RandomPortfolio['DSTs'] = RandomPortfolio['DST'].map(maps_dict['Salary_map']).astype(np.int32)
427
-
428
- RandomPortfolio['QBp'] = RandomPortfolio['QB'].map(maps_dict['Projection_map']).astype(np.float16)
429
- RandomPortfolio['RB1p'] = RandomPortfolio['RB1'].map(maps_dict['Projection_map']).astype(np.float16)
430
- RandomPortfolio['RB2p'] = RandomPortfolio['RB2'].map(maps_dict['Projection_map']).astype(np.float16)
431
- RandomPortfolio['WR1p'] = RandomPortfolio['WR1'].map(maps_dict['Projection_map']).astype(np.float16)
432
- RandomPortfolio['WR2p'] = RandomPortfolio['WR2'].map(maps_dict['Projection_map']).astype(np.float16)
433
- RandomPortfolio['WR3p'] = RandomPortfolio['WR3'].map(maps_dict['Projection_map']).astype(np.float16)
434
- RandomPortfolio['TEp'] = RandomPortfolio['TE'].map(maps_dict['Projection_map']).astype(np.float16)
435
- RandomPortfolio['FLEXp'] = RandomPortfolio['FLEX'].map(maps_dict['Projection_map']).astype(np.float16)
436
- RandomPortfolio['DSTp'] = RandomPortfolio['DST'].map(maps_dict['Projection_map']).astype(np.float16)
437
-
438
- RandomPortfolio['QBo'] = RandomPortfolio['QB'].map(maps_dict['Own_map']).astype(np.float16)
439
- RandomPortfolio['RB1o'] = RandomPortfolio['RB1'].map(maps_dict['Own_map']).astype(np.float16)
440
- RandomPortfolio['RB2o'] = RandomPortfolio['RB2'].map(maps_dict['Own_map']).astype(np.float16)
441
- RandomPortfolio['WR1o'] = RandomPortfolio['WR1'].map(maps_dict['Own_map']).astype(np.float16)
442
- RandomPortfolio['WR2o'] = RandomPortfolio['WR2'].map(maps_dict['Own_map']).astype(np.float16)
443
- RandomPortfolio['WR3o'] = RandomPortfolio['WR3'].map(maps_dict['Own_map']).astype(np.float16)
444
- RandomPortfolio['TEo'] = RandomPortfolio['TE'].map(maps_dict['Own_map']).astype(np.float16)
445
- RandomPortfolio['FLEXo'] = RandomPortfolio['FLEX'].map(maps_dict['Own_map']).astype(np.float16)
446
- RandomPortfolio['DSTo'] = RandomPortfolio['DST'].map(maps_dict['Own_map']).astype(np.float16)
447
-
448
- RandomPortArray = RandomPortfolio.to_numpy()
449
- del RandomPortfolio
450
-
451
- RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,10:19].astype(int))]
452
- RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,19:28].astype(np.double))]
453
- RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,28:37].astype(np.double))]
454
-
455
- RandomPortArrayOut = np.delete(RandomPortArray, np.s_[10:37], axis=1)
456
- RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut, columns = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own'])
457
- RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
458
- del RandomPortArray
459
- del RandomPortArrayOut
460
- # st.table(RandomPortfolioDF.head(50))
461
-
462
- if insert_port == 1:
463
- CleanPortfolio['Salary'] = sum([CleanPortfolio['QB'].map(maps_dict['Salary_map']),
464
- CleanPortfolio['RB1'].map(maps_dict['Salary_map']),
465
- CleanPortfolio['RB2'].map(maps_dict['Salary_map']),
466
- CleanPortfolio['WR1'].map(maps_dict['Salary_map']),
467
- CleanPortfolio['WR2'].map(maps_dict['Salary_map']),
468
- CleanPortfolio['WR3'].map(maps_dict['Salary_map']),
469
- CleanPortfolio['TE'].map(maps_dict['Salary_map']),
470
- CleanPortfolio['FLEX'].map(maps_dict['Salary_map']),
471
- CleanPortfolio['DST'].map(maps_dict['Salary_map'])
472
- ]).astype(np.int32)
473
- if insert_port == 1:
474
- CleanPortfolio['Projection'] = sum([CleanPortfolio['QB'].map(up_dict['Projection_map']),
475
- CleanPortfolio['RB1'].map(up_dict['Projection_map']),
476
- CleanPortfolio['RB2'].map(up_dict['Projection_map']),
477
- CleanPortfolio['WR1'].map(up_dict['Projection_map']),
478
- CleanPortfolio['WR2'].map(up_dict['Projection_map']),
479
- CleanPortfolio['WR3'].map(up_dict['Projection_map']),
480
- CleanPortfolio['TE'].map(up_dict['Projection_map']),
481
- CleanPortfolio['FLEX'].map(up_dict['Projection_map']),
482
- CleanPortfolio['DST'].map(up_dict['Projection_map'])
483
- ]).astype(np.float16)
484
- if insert_port == 1:
485
- CleanPortfolio['Own'] = sum([CleanPortfolio['QB'].map(maps_dict['Own_map']),
486
- CleanPortfolio['RB1'].map(maps_dict['Own_map']),
487
- CleanPortfolio['RB2'].map(maps_dict['Own_map']),
488
- CleanPortfolio['WR1'].map(maps_dict['Own_map']),
489
- CleanPortfolio['WR2'].map(maps_dict['Own_map']),
490
- CleanPortfolio['WR3'].map(maps_dict['Own_map']),
491
- CleanPortfolio['TE'].map(maps_dict['Own_map']),
492
- CleanPortfolio['FLEX'].map(maps_dict['Own_map']),
493
- CleanPortfolio['DST'].map(maps_dict['Own_map'])
494
- ]).astype(np.float16)
495
-
496
- if site_var1 == 'Draftkings':
497
- RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
498
- RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= 49500 - (FieldStrength * 1000)].reset_index(drop=True)
499
- elif site_var1 == 'Fanduel':
500
- RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
501
- RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= 59500 - (FieldStrength * 1000)].reset_index(drop=True)
502
-
503
- RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
504
-
505
- RandomPortfolio = RandomPortfolio[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own']]
506
-
507
- return RandomPortfolio, maps_dict
508
-
509
- player_stats = player_stat_table()
510
- dk_roo_raw = load_dk_player_projections()
511
- fd_roo_raw = load_fd_player_projections()
512
- t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
513
- site_slates = set_slate_teams()
514
- dkid_dict, fdid_dict = set_export_ids()
515
-
516
- static_exposure = pd.DataFrame(columns=['Player', 'count'])
517
- overall_exposure = pd.DataFrame(columns=['Player', 'count'])
518
-
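- # Tab 1: upload projections and an optional portfolio, then review exposures; Tab 2: run the contest simulation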
519
- tab1, tab2 = st.tabs(['Uploads', 'Contest Sim'])
520
-
521
- with tab1:
522
- with st.container():
523
- st.info("The Projections file can have any columns in any order, but must contain columns explicitly named: 'Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', and 'Own'. Upload your projections first to avoid an error message.")
524
- col1, col2 = st.columns([3, 3])
525
-
526
- with col1:
527
- proj_file = st.file_uploader("Upload Projections File", key = 'proj_uploader')
528
-
529
- if proj_file is not None:
530
- try:
531
- proj_dataframe = pd.read_csv(proj_file)
532
- proj_dataframe = proj_dataframe.dropna(subset='Median')
533
- proj_dataframe['Player'] = proj_dataframe['Player'].str.strip()
534
- try:
535
- proj_dataframe['Own'] = proj_dataframe['Own'].str.strip('%').astype(float)
536
- except:
537
- pass
538
-
539
- except:
540
- proj_dataframe = pd.read_excel(proj_file)
541
- proj_dataframe = proj_dataframe.dropna(subset='Median')
542
- proj_dataframe['Player'] = proj_dataframe['Player'].str.strip()
543
- try:
544
- proj_dataframe['Own'] = proj_dataframe['Own'].str.strip('%').astype(float)
545
- except:
546
- pass
547
- st.table(proj_dataframe.head(10))
548
- player_salary_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Salary))
549
- player_proj_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Median))
550
- player_own_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Own))
551
- player_team_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Team))
552
-
553
- with col2:
554
- portfolio_file = st.file_uploader("Upload Portfolio File", key = 'portfolio_uploader')
555
-
556
- if portfolio_file is not None:
557
- try:
558
- portfolio_dataframe = pd.read_csv(portfolio_file)
559
-
560
- except:
561
- portfolio_dataframe = pd.read_excel(portfolio_file)
562
-
563
- try:
564
- try:
565
- portfolio_dataframe.columns=["QB", "RB1", "RB2", "WR1", "WR2", "WR3", "TE", "FLEX", "DST"]
566
- split_portfolio = portfolio_dataframe
567
- split_portfolio[['QB', 'QB_ID']] = split_portfolio.QB.str.split("(", n=1, expand = True)
568
- split_portfolio[['RB1', 'RB1_ID']] = split_portfolio.RB1.str.split("(", n=1, expand = True)
569
- split_portfolio[['RB2', 'RB2_ID']] = split_portfolio.RB2.str.split("(", n=1, expand = True)
570
- split_portfolio[['WR1', 'WR1_ID']] = split_portfolio.WR1.str.split("(", n=1, expand = True)
571
- split_portfolio[['WR2', 'WR2_ID']] = split_portfolio.WR2.str.split("(", n=1, expand = True)
572
- split_portfolio[['WR3', 'WR3_ID']] = split_portfolio.WR3.str.split("(", n=1, expand = True)
573
- split_portfolio[['TE', 'TE_ID']] = split_portfolio.TE.str.split("(", n=1, expand = True)
574
- split_portfolio[['FLEX', 'FLEX_ID']] = split_portfolio.FLEX.str.split("(", n=1, expand = True)
575
- split_portfolio[['DST', 'DST_ID']] = split_portfolio.DST.str.split("(", n=1, expand = True)
576
-
577
- split_portfolio['QB'] = split_portfolio['QB'].str.strip()
578
- split_portfolio['RB1'] = split_portfolio['RB1'].str.strip()
579
- split_portfolio['RB2'] = split_portfolio['RB2'].str.strip()
580
- split_portfolio['WR1'] = split_portfolio['WR1'].str.strip()
581
- split_portfolio['WR2'] = split_portfolio['WR2'].str.strip()
582
- split_portfolio['WR3'] = split_portfolio['WR3'].str.strip()
583
- split_portfolio['TE'] = split_portfolio['TE'].str.strip()
584
- split_portfolio['FLEX'] = split_portfolio['FLEX'].str.strip()
585
- split_portfolio['DST'] = split_portfolio['DST'].str.strip()
586
-
587
- split_portfolio['Salary'] = sum([split_portfolio['QB'].map(player_salary_dict),
588
- split_portfolio['RB1'].map(player_salary_dict),
589
- split_portfolio['RB2'].map(player_salary_dict),
590
- split_portfolio['WR1'].map(player_salary_dict),
591
- split_portfolio['WR2'].map(player_salary_dict),
592
- split_portfolio['WR3'].map(player_salary_dict),
593
- split_portfolio['TE'].map(player_salary_dict),
594
- split_portfolio['FLEX'].map(player_salary_dict),
595
- split_portfolio['DST'].map(player_salary_dict)])
596
-
597
- split_portfolio['Projection'] = sum([split_portfolio['QB'].map(player_proj_dict),
598
- split_portfolio['RB1'].map(player_proj_dict),
599
- split_portfolio['RB2'].map(player_proj_dict),
600
- split_portfolio['WR1'].map(player_proj_dict),
601
- split_portfolio['WR2'].map(player_proj_dict),
602
- split_portfolio['WR3'].map(player_proj_dict),
603
- split_portfolio['TE'].map(player_proj_dict),
604
- split_portfolio['FLEX'].map(player_proj_dict),
605
- split_portfolio['DST'].map(player_proj_dict)])
606
-
607
- split_portfolio['Ownership'] = sum([split_portfolio['QB'].map(player_own_dict),
608
- split_portfolio['RB1'].map(player_own_dict),
609
- split_portfolio['RB2'].map(player_own_dict),
610
- split_portfolio['WR1'].map(player_own_dict),
611
- split_portfolio['WR2'].map(player_own_dict),
612
- split_portfolio['WR3'].map(player_own_dict),
613
- split_portfolio['TE'].map(player_own_dict),
614
- split_portfolio['FLEX'].map(player_own_dict),
615
- split_portfolio['DST'].map(player_own_dict)])
616
-
617
- split_portfolio['QB_team'] = split_portfolio['QB'].map(player_team_dict)
618
- split_portfolio['RB1_team'] = split_portfolio['RB1'].map(player_team_dict)
619
- split_portfolio['RB2_team'] = split_portfolio['RB2'].map(player_team_dict)
620
- split_portfolio['WR1_team'] = split_portfolio['WR1'].map(player_team_dict)
621
- split_portfolio['WR2_team'] = split_portfolio['WR2'].map(player_team_dict)
622
- split_portfolio['WR3_team'] = split_portfolio['WR3'].map(player_team_dict)
623
- split_portfolio['TE_team'] = split_portfolio['TE'].map(player_team_dict)
624
- split_portfolio['FLEX_team'] = split_portfolio['FLEX'].map(player_team_dict)
625
- split_portfolio['DST_team'] = split_portfolio['DST'].map(player_team_dict)
626
-
627
- split_portfolio = split_portfolio[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'Salary', 'Projection', 'Ownership', 'QB_team',
628
- 'RB1_team', 'RB2_team', 'WR1_team', 'WR2_team', 'WR3_team', 'TE_team', 'FLEX_team', 'DST_team']]
629
-
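- # Main_Stack is the team appearing most often among the WR/TE slots; Main_Stack_Size is that count minus one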
630
- split_portfolio['Main_Stack'] = split_portfolio.iloc[:, 15:19].apply(lambda row: row.value_counts().nlargest(2).index[0],axis=1)
631
- split_portfolio['Main_Stack_Size'] = split_portfolio.iloc[:, 15:19].apply(lambda row: row.value_counts().nlargest(2).values[0],axis=1)
632
- split_portfolio['Main_Stack_Size'] = split_portfolio['Main_Stack_Size'] - 1
633
-
634
-
635
- except:
636
- portfolio_dataframe.columns=["QB", "RB1", "RB2", "WR1", "WR2", "WR3", "TE", "FLEX", "DST"]
637
-
638
- split_portfolio = portfolio_dataframe
639
- split_portfolio[['QB_ID', 'QB']] = split_portfolio.QB.str.split(":", n=1, expand = True)
640
- split_portfolio[['RB1_ID', 'RB1']] = split_portfolio.RB1.str.split(":", n=1, expand = True)
641
- split_portfolio[['RB2_ID', 'RB2']] = split_portfolio.RB2.str.split(":", n=1, expand = True)
642
- split_portfolio[['WR1_ID', 'WR1']] = split_portfolio.WR1.str.split(":", n=1, expand = True)
643
- split_portfolio[['WR2_ID', 'WR2']] = split_portfolio.WR2.str.split(":", n=1, expand = True)
644
- split_portfolio[['WR3_ID', 'WR3']] = split_portfolio.WR3.str.split(":", n=1, expand = True)
645
- split_portfolio[['TE_ID', 'TE']] = split_portfolio.TE.str.split(":", n=1, expand = True)
646
- split_portfolio[['FLEX_ID', 'FLEX']] = split_portfolio.FLEX.str.split(":", n=1, expand = True)
647
- split_portfolio[['DST_ID', 'DST']] = split_portfolio.DST.str.split(":", n=1, expand = True)
648
-
649
- split_portfolio['QB'] = split_portfolio['QB'].str.strip()
650
- split_portfolio['RB1'] = split_portfolio['RB1'].str.strip()
651
- split_portfolio['RB2'] = split_portfolio['RB2'].str.strip()
652
- split_portfolio['WR1'] = split_portfolio['WR1'].str.strip()
653
- split_portfolio['WR2'] = split_portfolio['WR2'].str.strip()
654
- split_portfolio['WR3'] = split_portfolio['WR3'].str.strip()
655
- split_portfolio['TE'] = split_portfolio['TE'].str.strip()
656
- split_portfolio['FLEX'] = split_portfolio['FLEX'].str.strip()
657
- split_portfolio['DST'] = split_portfolio['DST'].str.strip()
658
-
659
- split_portfolio['Salary'] = sum([split_portfolio['QB'].map(player_salary_dict),
660
- split_portfolio['RB1'].map(player_salary_dict),
661
- split_portfolio['RB2'].map(player_salary_dict),
662
- split_portfolio['WR1'].map(player_salary_dict),
663
- split_portfolio['WR2'].map(player_salary_dict),
664
- split_portfolio['WR3'].map(player_salary_dict),
665
- split_portfolio['TE'].map(player_salary_dict),
666
- split_portfolio['FLEX'].map(player_salary_dict),
667
- split_portfolio['DST'].map(player_salary_dict)])
668
-
669
- split_portfolio['Projection'] = sum([split_portfolio['QB'].map(player_proj_dict),
670
- split_portfolio['RB1'].map(player_proj_dict),
671
- split_portfolio['RB2'].map(player_proj_dict),
672
- split_portfolio['WR1'].map(player_proj_dict),
673
- split_portfolio['WR2'].map(player_proj_dict),
674
- split_portfolio['WR3'].map(player_proj_dict),
675
- split_portfolio['TE'].map(player_proj_dict),
676
- split_portfolio['FLEX'].map(player_proj_dict),
677
- split_portfolio['DST'].map(player_proj_dict)])
678
-
679
- st.table(split_portfolio.head(10))
680
- split_portfolio['Ownership'] = sum([split_portfolio['QB'].map(player_own_dict),
681
- split_portfolio['RB1'].map(player_own_dict),
682
- split_portfolio['RB2'].map(player_own_dict),
683
- split_portfolio['WR1'].map(player_own_dict),
684
- split_portfolio['WR2'].map(player_own_dict),
685
- split_portfolio['WR3'].map(player_own_dict),
686
- split_portfolio['TE'].map(player_own_dict),
687
- split_portfolio['FLEX'].map(player_own_dict),
688
- split_portfolio['DST'].map(player_own_dict)])
689
-
690
- split_portfolio['QB_team'] = split_portfolio['QB'].map(player_team_dict)
691
- split_portfolio['RB1_team'] = split_portfolio['RB1'].map(player_team_dict)
692
- split_portfolio['RB2_team'] = split_portfolio['RB2'].map(player_team_dict)
693
- split_portfolio['WR1_team'] = split_portfolio['WR1'].map(player_team_dict)
694
- split_portfolio['WR2_team'] = split_portfolio['WR2'].map(player_team_dict)
695
- split_portfolio['WR3_team'] = split_portfolio['WR3'].map(player_team_dict)
696
- split_portfolio['TE_team'] = split_portfolio['TE'].map(player_team_dict)
697
- split_portfolio['FLEX_team'] = split_portfolio['FLEX'].map(player_team_dict)
698
- split_portfolio['DST_team'] = split_portfolio['DST'].map(player_team_dict)
699
-
700
- split_portfolio = split_portfolio[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'Salary', 'Projection', 'Ownership', 'QB_team',
701
- 'RB1_team', 'RB2_team', 'WR1_team', 'WR2_team', 'WR3_team', 'TE_team', 'FLEX_team', 'DST_team']]
702
-
703
- split_portfolio['Main_Stack'] = split_portfolio.iloc[:, 15:19].apply(lambda row: row.value_counts().nlargest(2).index[0],axis=1)
704
- split_portfolio['Main_Stack_Size'] = split_portfolio.iloc[:, 15:19].apply(lambda row: row.value_counts().nlargest(2).values[0],axis=1)
705
- split_portfolio['Main_Stack_Size'] = split_portfolio['Main_Stack_Size'] - 1
706
-
707
- except:
708
- split_portfolio = portfolio_dataframe
709
-
710
- split_portfolio['Salary'] = sum([split_portfolio['QB'].map(player_salary_dict),
711
- split_portfolio['RB1'].map(player_salary_dict),
712
- split_portfolio['RB2'].map(player_salary_dict),
713
- split_portfolio['WR1'].map(player_salary_dict),
714
- split_portfolio['WR2'].map(player_salary_dict),
715
- split_portfolio['WR3'].map(player_salary_dict),
716
- split_portfolio['TE'].map(player_salary_dict),
717
- split_portfolio['FLEX'].map(player_salary_dict),
718
- split_portfolio['DST'].map(player_salary_dict)])
719
-
720
- split_portfolio['Projection'] = sum([split_portfolio['QB'].map(player_proj_dict),
721
- split_portfolio['RB1'].map(player_proj_dict),
722
- split_portfolio['RB2'].map(player_proj_dict),
723
- split_portfolio['WR1'].map(player_proj_dict),
724
- split_portfolio['WR2'].map(player_proj_dict),
725
- split_portfolio['WR3'].map(player_proj_dict),
726
- split_portfolio['TE'].map(player_proj_dict),
727
- split_portfolio['FLEX'].map(player_proj_dict),
728
- split_portfolio['DST'].map(player_proj_dict)])
729
-
730
- split_portfolio['Ownership'] = sum([split_portfolio['QB'].map(player_own_dict),
731
- split_portfolio['RB1'].map(player_own_dict),
732
- split_portfolio['RB2'].map(player_own_dict),
733
- split_portfolio['WR1'].map(player_own_dict),
734
- split_portfolio['WR2'].map(player_own_dict),
735
- split_portfolio['WR3'].map(player_own_dict),
736
- split_portfolio['TE'].map(player_own_dict),
737
- split_portfolio['FLEX'].map(player_own_dict),
738
- split_portfolio['DST'].map(player_own_dict)])
739
-
740
- split_portfolio['QB_team'] = split_portfolio['QB'].map(player_team_dict)
741
- split_portfolio['RB1_team'] = split_portfolio['RB1'].map(player_team_dict)
742
- split_portfolio['RB2_team'] = split_portfolio['RB2'].map(player_team_dict)
743
- split_portfolio['WR1_team'] = split_portfolio['WR1'].map(player_team_dict)
744
- split_portfolio['WR2_team'] = split_portfolio['WR2'].map(player_team_dict)
745
- split_portfolio['WR3_team'] = split_portfolio['WR3'].map(player_team_dict)
746
- split_portfolio['TE_team'] = split_portfolio['TE'].map(player_team_dict)
747
- split_portfolio['FLEX_team'] = split_portfolio['FLEX'].map(player_team_dict)
748
- split_portfolio['DST_team'] = split_portfolio['DST'].map(player_team_dict)
749
-
750
- split_portfolio = split_portfolio[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'Salary', 'Projection', 'Ownership', 'QB_team',
751
- 'RB1_team', 'RB2_team', 'WR1_team', 'WR2_team', 'WR3_team', 'TE_team', 'FLEX_team', 'DST_team']]
752
-
753
- split_portfolio['Main_Stack'] = split_portfolio.iloc[:, 15:19].apply(lambda row: row.value_counts().nlargest(2).index[0],axis=1)
754
- split_portfolio['Main_Stack_Size'] = split_portfolio.iloc[:, 15:19].apply(lambda row: row.value_counts().nlargest(2).values[0],axis=1)
755
- split_portfolio['Main_Stack_Size'] = split_portfolio['Main_Stack_Size'] - 1
756
-
757
- for player_cols in split_portfolio.iloc[:, :9]:
758
- static_col_raw = split_portfolio[player_cols].value_counts()
759
- static_col = static_col_raw.to_frame()
760
- static_col.reset_index(inplace=True)
761
- static_col.columns = ['Player', 'count']
762
- static_exposure = pd.concat([static_exposure, static_col], ignore_index=True)
763
- static_exposure['Exposure'] = static_exposure['count'] / len(split_portfolio)
764
- static_exposure = static_exposure[['Player', 'Exposure']]
765
-
766
- del player_salary_dict
767
- del player_proj_dict
768
- del player_own_dict
769
- del player_team_dict
770
- del static_col_raw
771
- del static_col
772
- with st.container():
773
- col1, col2 = st.columns([3, 3])
774
-
775
- if portfolio_file is not None:
776
- with col1:
777
- team_split_var1 = st.radio("Are you wanting to isolate any lineups with specific main stacks?", ('Full Portfolio', 'Specific Stacks'))
778
- if team_split_var1 == 'Specific Stacks':
779
- team_var1 = st.multiselect('Which main stacks would you like to include in the Portfolio?', options = split_portfolio['Main_Stack'].unique())
780
- elif team_split_var1 == 'Full Portfolio':
781
- team_var1 = split_portfolio.Main_Stack.values.tolist()
782
- with col2:
783
- player_split_var1 = st.radio("Are you wanting to isolate any lineups with specific players?", ('Full Players', 'Specific Players'))
784
- if player_split_var1 == 'Specific Players':
785
- find_var1 = st.multiselect('Which players must be included in the lineups?', options = static_exposure['Player'].unique())
786
- elif player_split_var1 == 'Full Players':
787
- find_var1 = static_exposure.Player.values.tolist()
788
-
789
- split_portfolio = split_portfolio[split_portfolio['Main_Stack'].isin(team_var1)]
790
- if player_split_var1 == 'Specific Players':
791
- split_portfolio = split_portfolio[np.equal.outer(split_portfolio.to_numpy(copy=False), find_var1).any(axis=1).all(axis=1)]
792
- elif player_split_var1 == 'Full Players':
793
- split_portfolio = split_portfolio
794
-
795
- for player_cols in split_portfolio.iloc[:, :9]:
796
- exposure_col_raw = split_portfolio[player_cols].value_counts()
797
- exposure_col = exposure_col_raw.to_frame()
798
- exposure_col.reset_index(inplace=True)
799
- exposure_col.columns = ['Player', 'count']
800
- overall_exposure = pd.concat([overall_exposure, exposure_col], ignore_index=True)
801
- overall_exposure['Exposure'] = overall_exposure['count'] / len(split_portfolio)
802
- overall_exposure = overall_exposure.groupby('Player').sum()
803
- overall_exposure.reset_index(inplace=True)
804
- overall_exposure = overall_exposure[['Player', 'Exposure']]
805
- overall_exposure = overall_exposure.set_index('Player')
806
- overall_exposure = overall_exposure.sort_values(by='Exposure', ascending=False)
807
- overall_exposure['Exposure'] = overall_exposure['Exposure'].astype(float).map(lambda n: '{:.2%}'.format(n))
808
-
809
- with st.container():
810
- col1, col2 = st.columns([1, 6])
811
-
812
- with col1:
813
- if portfolio_file is not None:
814
- st.header('Exposure View')
815
- st.dataframe(overall_exposure)
816
-
817
- with col2:
818
- if portfolio_file is not None:
819
- st.header('Portfolio View')
820
- split_portfolio = split_portfolio.reset_index()
821
- split_portfolio['Lineup'] = split_portfolio['index'] + 1
822
- display_portfolio = split_portfolio[['Lineup', 'QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'Salary', 'Main_Stack', 'Main_Stack_Size', 'Projection', 'Ownership']]
823
- display_portfolio = display_portfolio.set_index('Lineup')
824
- st.dataframe(display_portfolio.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').background_gradient(cmap='RdYlGn_r', subset=['Ownership']).format(precision=2))
825
- del split_portfolio
826
- del exposure_col_raw
827
- del exposure_col
828
- with tab2:
829
- col1, col2 = st.columns([1, 7])
830
- with col1:
831
- st.info(t_stamp)
832
- if st.button("Load/Reset Data", key='reset1'):
833
- st.cache_resource.clear()
- st.cache_data.clear()
834
- dk_roo_raw = load_dk_player_projections()
835
- fd_roo_raw = load_fd_player_projections()
836
- t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
837
- site_slates = set_slate_teams()
838
- dkid_dict, fdid_dict = set_export_ids()
839
-
840
- slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Thurs-Mon Slate', 'User'))
841
- site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'))
842
- if site_var1 == 'Draftkings':
843
- if slate_var1 == 'User':
844
- raw_baselines = proj_dataframe[['Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', 'Own']]
845
- elif slate_var1 != 'User':
846
- raw_baselines = dk_roo_raw[dk_roo_raw['slate'] == str(slate_var1)]
847
- raw_baselines = raw_baselines[raw_baselines['version'] == 'overall']
848
- elif site_var1 == 'Fanduel':
849
- if slate_var1 == 'User':
850
- raw_baselines = proj_dataframe
851
- elif slate_var1 != 'User':
852
- raw_baselines = fd_roo_raw[fd_roo_raw['slate'] == str(slate_var1)]
853
- raw_baselines = raw_baselines[raw_baselines['version'] == 'overall']
854
- st.info("If you are uploading a portfolio, note that there is an adjustments to projections and deviation mapping to prevent 'Projection Bias' and create a fair simulation")
855
- insert_port1 = st.selectbox("Are you uploading a portfolio?", ('No', 'Yes'), key='insert_port1')
856
- if insert_port1 == 'Yes':
857
- insert_port = 1
858
- elif insert_port1 == 'No':
859
- insert_port = 0
860
- contest_var1 = st.selectbox("What contest size are you simulating?", ('Small', 'Medium', 'Large'))
861
- if contest_var1 == 'Small':
862
- Contest_Size = 1000
863
- elif contest_var1 == 'Medium':
864
- Contest_Size = 5000
865
- elif contest_var1 == 'Large':
866
- Contest_Size = 10000
867
- linenum_var1 = 1000
868
- strength_var1 = st.selectbox("How sharp is the field in the contest?", ('Not Very', 'Average', 'Very'))
869
- if strength_var1 == 'Not Very':
870
- sharp_split = .33
871
- Strength_var = .50
872
- scaling_var = 5
873
- elif strength_var1 == 'Average':
874
- sharp_split = .50
875
- Strength_var = .25
876
- scaling_var = 10
877
- elif strength_var1 == 'Very':
878
- sharp_split = .75
879
- Strength_var = .01
880
- scaling_var = 15
881
-
882
- with col2:
883
- if st.button("Simulate Contest"):
884
- try:
885
- del dst_freq
886
- del flex_freq
887
- del te_freq
888
- del wr_freq
889
- del rb_freq
890
- del qb_freq
891
- del player_freq
892
- del Sim_Winner_Export
893
- del Sim_Winner_Frame
894
- except:
895
- pass
896
- with st.container():
897
- st.write('Contest Simulation Starting')
898
- seed_depth1 = 5
899
- Total_Runs = 1000000
900
- if Contest_Size <= 1000:
901
- strength_grow = .01
902
- elif Contest_Size > 1000 and Contest_Size <= 2500:
903
- strength_grow = .025
904
- elif Contest_Size > 2500 and Contest_Size <= 5000:
905
- strength_grow = .05
906
- elif Contest_Size > 5000 and Contest_Size <= 20000:
907
- strength_grow = .075
908
- elif Contest_Size > 20000:
909
- strength_grow = .1
910
-
911
- field_growth = 100 * strength_grow
912
-
913
- Sort_function = 'Median'
914
- if Sort_function == 'Median':
915
- Sim_function = 'Projection'
916
- elif Sort_function == 'Own':
917
- Sim_function = 'Own'
918
-
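- # Adjust raw ownership for contest size: above-average players are boosted (more sharply in smaller contests), capped at 75%, then rescaled so total ownership sums to 900% (nine roster slots)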
919
- if slate_var1 == 'User':
920
- OwnFrame = proj_dataframe
921
- if contest_var1 == 'Small':
922
- OwnFrame['Own%'] = np.where((OwnFrame['Position'] == 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (10 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean(), OwnFrame['Own'])
923
- OwnFrame['Own%'] = np.where((OwnFrame['Position'] != 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (5 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean(), OwnFrame['Own%'])
924
- OwnFrame['Own%'] = np.where(OwnFrame['Own%'] > 75, 75, OwnFrame['Own%'])
925
- OwnFrame['Own'] = OwnFrame['Own%'] * (900 / OwnFrame['Own%'].sum())
926
- if contest_var1 == 'Medium':
927
- OwnFrame['Own%'] = np.where((OwnFrame['Position'] == 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (6 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean(), OwnFrame['Own'])
928
- OwnFrame['Own%'] = np.where((OwnFrame['Position'] != 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (3 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean(), OwnFrame['Own%'])
929
- OwnFrame['Own%'] = np.where(OwnFrame['Own%'] > 75, 75, OwnFrame['Own%'])
930
- OwnFrame['Own'] = OwnFrame['Own%'] * (900 / OwnFrame['Own%'].sum())
931
- if contest_var1 == 'Large':
932
- OwnFrame['Own%'] = np.where((OwnFrame['Position'] == 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (3 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean(), OwnFrame['Own'])
933
- OwnFrame['Own%'] = np.where((OwnFrame['Position'] != 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (1.5 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean(), OwnFrame['Own%'])
934
- OwnFrame['Own%'] = np.where(OwnFrame['Own%'] > 75, 75, OwnFrame['Own%'])
935
- OwnFrame['Own'] = OwnFrame['Own%'] * (900 / OwnFrame['Own%'].sum())
936
- Overall_Proj = OwnFrame[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
937
-
938
- del OwnFrame
939
-
940
- elif slate_var1 != 'User':
941
- initial_proj = raw_baselines
942
- drop_frame = initial_proj.drop_duplicates(subset = 'Player',keep = 'first')
943
- OwnFrame = drop_frame[['Player', 'Team', 'Position', 'Median', 'Own', 'Floor', 'Ceiling', 'Salary']]
944
- if contest_var1 == 'Small':
945
- OwnFrame['Own%'] = np.where((OwnFrame['Position'] == 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (10 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean(), OwnFrame['Own'])
946
- OwnFrame['Own%'] = np.where((OwnFrame['Position'] != 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (5 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean(), OwnFrame['Own%'])
947
- OwnFrame['Own%'] = np.where(OwnFrame['Own%'] > 75, 75, OwnFrame['Own%'])
948
- OwnFrame['Own'] = OwnFrame['Own%'] * (900 / OwnFrame['Own%'].sum())
949
- if contest_var1 == 'Medium':
950
- OwnFrame['Own%'] = np.where((OwnFrame['Position'] == 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (6 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean(), OwnFrame['Own'])
951
- OwnFrame['Own%'] = np.where((OwnFrame['Position'] != 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (3 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean(), OwnFrame['Own%'])
952
- OwnFrame['Own%'] = np.where(OwnFrame['Own%'] > 75, 75, OwnFrame['Own%'])
953
- OwnFrame['Own'] = OwnFrame['Own%'] * (900 / OwnFrame['Own%'].sum())
954
- if contest_var1 == 'Large':
955
- OwnFrame['Own%'] = np.where((OwnFrame['Position'] == 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (3 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean(), OwnFrame['Own'])
956
- OwnFrame['Own%'] = np.where((OwnFrame['Position'] != 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (1.5 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean(), OwnFrame['Own%'])
957
- OwnFrame['Own%'] = np.where(OwnFrame['Own%'] > 75, 75, OwnFrame['Own%'])
958
- OwnFrame['Own'] = OwnFrame['Own%'] * (900 / OwnFrame['Own%'].sum())
959
- Overall_Proj = OwnFrame[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
960
-
961
- del initial_proj
962
- del drop_frame
963
- del OwnFrame
964
-
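# ---- Editor's sketch (not part of the original file) -------------------------
# The ownership blocks above repeat one formula with different QB / non-QB
# multipliers: Small 10 / 5, Medium 6 / 3, Large 3 / 1.5. A helper along these
# lines could express the rule once; the function name, the CONTEST_MULTS table
# and the call shown at the end are illustrative assumptions, not app code.
import pandas as pd

CONTEST_MULTS = {'Small': (10, 5), 'Medium': (6, 3), 'Large': (3, 1.5)}

def boost_ownership(frame: pd.DataFrame, contest_size: str) -> pd.DataFrame:
    """Boost above-average ownership, cap at 75%, rescale to a 900-point pool."""
    qb_mult, flex_mult = CONTEST_MULTS[contest_size]
    out = frame.copy()
    out['Own%'] = out['Own']  # players at or below their group mean keep raw ownership
    for mask, mult in ((out['Position'] == 'QB', qb_mult),
                       (out['Position'] != 'QB', flex_mult)):
        grp_mean = out.loc[mask, 'Own'].mean()
        above = mask & (out['Own'] >= grp_mean)
        out.loc[above, 'Own%'] = (out.loc[above, 'Own']
                                  * (mult * (out.loc[above, 'Own'] - grp_mean) / 100)
                                  + grp_mean)
    out['Own%'] = out['Own%'].clip(upper=75)
    out['Own'] = out['Own%'] * (900 / out['Own%'].sum())
    return out
# e.g. OwnFrame = boost_ownership(OwnFrame, contest_var1)   # hypothetical usage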
965
- if insert_port == 1:
966
- UserPortfolio = portfolio_dataframe[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST']]
967
- elif insert_port == 0:
968
- UserPortfolio = pd.DataFrame(columns = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST'])
969
-
970
- Overall_Proj.replace('', np.nan, inplace=True)
971
- Overall_Proj = Overall_Proj.dropna(subset=['Median'])
972
- Overall_Proj = Overall_Proj.assign(Value=lambda x: (x.Median / (x.Salary / 1000)))
973
- Overall_Proj['Sort_var'] = (Overall_Proj['Median'].rank(ascending=False) + Overall_Proj['Value'].rank(ascending=False)) / 2
974
- Overall_Proj = Overall_Proj.sort_values(by='Sort_var', ascending=False)
975
- Overall_Proj['Own'] = np.where((Overall_Proj['Median'] > 0) & (Overall_Proj['Own'] == 0), 1, Overall_Proj['Own'])
976
- Overall_Proj = Overall_Proj.loc[Overall_Proj['Own'] > 0]
977
-
978
- Overall_Proj['Floor'] = np.where(Overall_Proj['Position'] == 'QB', Overall_Proj['Median'] * .5, Overall_Proj['Median'] * .25)
979
- Overall_Proj['Ceiling'] = np.where(Overall_Proj['Position'] == 'WR', Overall_Proj['Median'] * 2, Overall_Proj['Median'] + Overall_Proj['Floor'])
980
- Overall_Proj['STDev'] = Overall_Proj['Median'] / 4
981
-
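# Editor's note: the floor/ceiling/stdev model above is heuristic. For a player
# with a 20.0-point median it implies:
#   QB:        Floor = 10.0, Ceiling = 30.0, STDev = 5.0
#   WR:        Floor =  5.0, Ceiling = 40.0, STDev = 5.0
#   RB/TE/DST: Floor =  5.0, Ceiling = 25.0, STDev = 5.0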
982
- Teams_used = Overall_Proj['Team'].drop_duplicates().reset_index(drop=True)
983
- Teams_used = Teams_used.reset_index()
984
- Teams_used['team_item'] = Teams_used['index'] + 1
985
- Teams_used = Teams_used.drop(columns=['index'])
986
- Teams_used_dictraw = Teams_used.drop(columns=['team_item'])
987
- Teams_used_dict = Teams_used_dictraw.to_dict()
988
-
989
- del Teams_used_dictraw
990
-
991
- team_list = Teams_used['Team'].to_list()
992
- item_list = Teams_used['team_item'].to_list()
993
-
994
- FieldStrength_raw = Strength_var + ((30 - len(Teams_used)) * .01)
995
- FieldStrength = FieldStrength_raw - (FieldStrength_raw * (20000 / Contest_Size))
996
-
997
- del FieldStrength_raw
998
-
999
- if FieldStrength < 0:
1000
- FieldStrength = Strength_var
1001
- field_split = Strength_var
1002
-
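# Editor's note (illustrative numbers only): with Strength_var = 0.25, a
# 10-team slate and Contest_Size = 100000, the formulas above give
#   FieldStrength_raw = 0.25 + (30 - 10) * 0.01        = 0.45
#   FieldStrength     = 0.45 - 0.45 * (20000 / 100000) = 0.36
# The FieldStrength < 0 fallback above only fires when Contest_Size is under
# 20000 entries, where the subtraction would push the value negative.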
1003
- Overall_Proj['Team'] = Overall_Proj['Team'].replace(team_list, item_list)  # list-to-list replace maps every team in one call
1005
-
1006
- qbs_raw = Overall_Proj[Overall_Proj.Position == 'QB']
1007
- qbs_raw = qbs_raw.dropna(subset=['Median'])
1008
- qbs_raw = qbs_raw.reset_index(drop=True)
1009
- qbs_raw = qbs_raw.sort_values(by=['Median'], ascending=False)
1010
-
1011
- qbs = qbs_raw.head(round(len(qbs_raw)))
1012
- qbs = qbs.assign(Var = range(0,len(qbs)))
1013
- qb_dict = pd.Series(qbs.Player.values, index=qbs.Var).to_dict()
1014
-
1015
- defs_raw = Overall_Proj[Overall_Proj.Position.str.contains("D")]
1016
- defs_raw = defs_raw.dropna(subset=['Median'])
1017
- defs_raw = defs_raw.reset_index(drop=True)
1018
- defs_raw = defs_raw.sort_values(by=['Own', 'Value'], ascending=False)
1019
-
1020
- defs = defs_raw.head(round(len(defs_raw)))
1021
- defs = defs.assign(Var = range(0,len(defs)))
1022
- def_dict = pd.Series(defs.Player.values, index=defs.Var).to_dict()
1023
-
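# Editor's note: qb_dict and def_dict above map a 0-based rank to a player name.
# An equivalent, shorter construction (behaviour unchanged) would be:
#   qb_dict  = dict(enumerate(qbs['Player']))
#   def_dict = dict(enumerate(defs['Player']))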
1024
- rbs_raw = Overall_Proj[Overall_Proj.Position == 'RB']
1025
- rbs_raw = rbs_raw.dropna(subset=['Median'])
1026
- rbs_raw = rbs_raw.reset_index(drop=True)
1027
- rbs_raw = rbs_raw.sort_values(by=['Own', 'Value'], ascending=False)
1028
-
1029
- wrs_raw = Overall_Proj[Overall_Proj.Position == 'WR']
1030
- wrs_raw = wrs_raw.dropna(subset=['Median'])
1031
- wrs_raw = wrs_raw.reset_index(drop=True)
1032
- wrs_raw = wrs_raw.sort_values(by=['Own', 'Median'], ascending=False)
1033
-
1034
- tes_raw = Overall_Proj[Overall_Proj.Position == 'TE']
1035
- tes_raw = tes_raw.dropna(subset=['Median'])
1036
- tes_raw = tes_raw.reset_index(drop=True)
1037
- tes_raw = tes_raw.sort_values(by=['Own', 'Value'], ascending=False)
1038
-
1039
- pos_players = pd.concat([rbs_raw, wrs_raw, tes_raw])
1040
- pos_players = pos_players.dropna(subset=['Median'])
1041
- pos_players = pos_players.reset_index(drop=True)
1042
-
1043
- del qbs_raw
1044
- del defs_raw
1045
- del rbs_raw
1046
- del wrs_raw
1047
- del tes_raw
1048
-
1049
- if insert_port == 1:
1050
- try:
1051
- # Initialize an empty DataFrame for Raw Portfolio
1052
- Raw_Portfolio = pd.DataFrame()
1053
-
1054
- # Loop through each position and split the data accordingly
1055
- positions = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST']
1056
- for pos in positions:
1057
- temp_df = UserPortfolio[pos].str.split("(", n=1, expand=True)
1058
- temp_df.columns = [pos, 'Drop']
1059
- Raw_Portfolio = pd.concat([Raw_Portfolio, temp_df], axis=1)
1060
-
1061
- # Select only necessary columns and strip white spaces
1062
- CleanPortfolio = Raw_Portfolio[positions].apply(lambda x: x.str.strip())
1063
- CleanPortfolio.reset_index(inplace=True)
1064
- CleanPortfolio['User/Field'] = CleanPortfolio['index'] + 1
1065
- CleanPortfolio.drop(columns=['index'], inplace=True)
1066
-
1067
- CleanPortfolio.replace('', np.nan, inplace=True)
1068
- CleanPortfolio.dropna(subset=['QB'], inplace=True)
1069
-
1070
- # Create frequency table for players
1071
- cleaport_players = pd.DataFrame(
1072
- np.column_stack(np.unique(CleanPortfolio.iloc[:, 0:9].values, return_counts=True)),
1073
- columns=['Player', 'Freq']
1074
- ).sort_values('Freq', ascending=False).reset_index(drop=True)
1075
- cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
1076
-
1077
- # Merge and update nerf_frame
1078
- nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
1079
- for col in ['Median', 'Floor', 'Ceiling', 'STDev']:
1080
- nerf_frame[col] *= 0.90
1081
- del Raw_Portfolio
1082
- except Exception:
1083
- CleanPortfolio = UserPortfolio.reset_index()
1084
- CleanPortfolio['User/Field'] = CleanPortfolio['index'] + 1
1085
- CleanPortfolio.drop(columns=['index'], inplace=True)
1086
-
1087
- # Replace empty strings and drop rows with NaN in 'QB' column
1088
- CleanPortfolio.replace('', np.nan, inplace=True)
1089
- CleanPortfolio.dropna(subset=['QB'], inplace=True)
1090
-
1091
- # Create frequency table for players
1092
- cleaport_players = pd.DataFrame(
1093
- np.column_stack(np.unique(CleanPortfolio.iloc[:, 0:9].values, return_counts=True)),
1094
- columns=['Player', 'Freq']
1095
- ).sort_values('Freq', ascending=False).reset_index(drop=True)
1096
- cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
1097
-
1098
- # Merge and update nerf_frame
1099
- nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
1100
- for col in ['Median', 'Floor', 'Ceiling', 'STDev']:
1101
- nerf_frame[col] *= 0.90
1102
-
1103
- elif insert_port == 0:
1104
- CleanPortfolio = UserPortfolio
1105
- cleaport_players = pd.DataFrame(np.column_stack(np.unique(CleanPortfolio.iloc[:,0:9].values, return_counts=True)),
1106
- columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
1107
- cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
1108
- nerf_frame = Overall_Proj
1109
-
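# Editor's sketch: the try/except above extracts the player name from uploaded
# portfolio cells by keeping everything before the first "(" (the except path
# falls back to the raw upload). A standalone equivalent, assuming cells look
# like "Player Name (TEAM)" or "Player Name (12.3%)", might be:
def strip_cell(cell):
    """Return the player-name portion of an uploaded portfolio cell."""
    return cell.split("(", 1)[0].strip() if isinstance(cell, str) else cell
# e.g. CleanPortfolio = UserPortfolio.applymap(strip_cell)   # hypothetical usage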
1110
- ref_dict = {
1111
- 'pos':['RB', 'WR', 'TE', 'FLEX'],
1112
- 'pos_dfs':['RB_Table', 'WR_Table', 'TE_Table', 'FLEX_Table'],
1113
- 'pos_dicts':['rb_dict', 'wr_dict', 'te_dict', 'flex_dict']
1114
- }
1115
-
1116
- maps_dict = {
1117
- 'Floor_map':dict(zip(Overall_Proj.Player,Overall_Proj.Floor)),
1118
- 'Projection_map':dict(zip(Overall_Proj.Player,Overall_Proj.Median)),
1119
- 'Ceiling_map':dict(zip(Overall_Proj.Player,Overall_Proj.Ceiling)),
1120
- 'Salary_map':dict(zip(Overall_Proj.Player,Overall_Proj.Salary)),
1121
- 'Pos_map':dict(zip(Overall_Proj.Player,Overall_Proj.Position)),
1122
- 'Own_map':dict(zip(Overall_Proj.Player,Overall_Proj.Own)),
1123
- 'Team_map':dict(zip(Overall_Proj.Player,Overall_Proj.Team)),
1124
- 'STDev_map':dict(zip(Overall_Proj.Player,Overall_Proj.STDev)),
1125
- 'team_check_map':dict(zip(Overall_Proj.Player,Overall_Proj.Team))
1126
- }
1127
-
1128
- up_dict = {
1129
- 'Floor_map':dict(zip(cleaport_players.Player,nerf_frame.Floor)),
1130
- 'Projection_map':dict(zip(cleaport_players.Player,nerf_frame.Median)),
1131
- 'Ceiling_map':dict(zip(cleaport_players.Player,nerf_frame.Ceiling)),
1132
- 'Salary_map':dict(zip(cleaport_players.Player,nerf_frame.Salary)),
1133
- 'Pos_map':dict(zip(cleaport_players.Player,nerf_frame.Position)),
1134
- 'Own_map':dict(zip(cleaport_players.Player,nerf_frame.Own)),
1135
- 'Team_map':dict(zip(cleaport_players.Player,nerf_frame.Team)),
1136
- 'STDev_map':dict(zip(cleaport_players.Player,nerf_frame.STDev)),
1137
- 'team_check_map':dict(zip(cleaport_players.Player,nerf_frame.Team))
1138
- }
1139
-
1140
- del cleaport_players
1141
- del Overall_Proj
1142
- del nerf_frame
1143
-
1144
- st.write('Seed frame creation')
1145
- FinalPortfolio, maps_dict = run_seed_frame(seed_depth1, Strength_var, strength_grow, Teams_used, Total_Runs)
1146
-
1147
- Sim_size = linenum_var1
1148
- SimVar = 1
1149
- Sim_Winners = []
1150
- fp_array = FinalPortfolio.values
1151
-
1152
- if insert_port == 1:
1153
- up_array = CleanPortfolio.values
1154
-
1155
- # Pre-vectorize functions
1156
- vec_projection_map = np.vectorize(maps_dict['Projection_map'].__getitem__)
1157
- vec_stdev_map = np.vectorize(maps_dict['STDev_map'].__getitem__)
1158
-
1159
- if insert_port == 1:
1160
- vec_up_projection_map = np.vectorize(up_dict['Projection_map'].__getitem__)
1161
- vec_up_stdev_map = np.vectorize(up_dict['STDev_map'].__getitem__)
1162
-
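# Editor's sketch of the per-lineup scoring done in the loop below: the nine
# roster columns of each sampled lineup are mapped to (mean, stdev) through the
# dictionaries above, one normal draw is taken per player, and the draws are
# summed into a single fantasy score per lineup. Function and argument names are
# illustrative; name_rows is assumed to be a 2-D array of player names, one row
# per lineup.
import numpy as np

def simulate_lineup_scores(name_rows, projection_map, stdev_map, rng=None):
    if rng is None:
        rng = np.random.default_rng()
    means = np.vectorize(projection_map.__getitem__)(name_rows)
    stdevs = np.vectorize(stdev_map.__getitem__)(name_rows)
    return rng.normal(loc=means, scale=stdevs).sum(axis=1)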
1163
- st.write('Simulating contest on frames')
1164
-
1165
- while SimVar <= Sim_size:
1166
- try:
1167
- fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size-len(CleanPortfolio), replace=False)]
1168
-
1169
- sample_arrays1 = np.c_[
1170
- fp_random,
1171
- np.sum(np.random.normal(
1172
- loc=vec_projection_map(fp_random[:, :-5]),
1173
- scale=vec_stdev_map(fp_random[:, :-5])),
1174
- axis=1)
1175
- ]
1176
-
1177
- if insert_port == 1:
1178
- sample_arrays2 = np.c_[
1179
- up_array,
1180
- np.sum(np.random.normal(
1181
- loc=vec_up_projection_map(up_array[:, :-5]),
1182
- scale=vec_up_stdev_map(up_array[:, :-5])),
1183
- axis=1)
1184
- ]
1185
- sample_arrays = np.vstack((sample_arrays1, sample_arrays2))
1186
- else:
1187
- sample_arrays = sample_arrays1
1188
-
1189
- final_array = sample_arrays[sample_arrays[:, 10].argsort()[::-1]]
1190
- best_lineup = final_array[final_array[:, -1].argsort(kind='stable')[::-1][:1]]
1191
- Sim_Winners.append(best_lineup)
1192
- SimVar += 1
1193
-
1194
- except Exception as e:
1195
- # Log the failure and retry this draw; SimVar is not advanced here, so a persistent error would loop indefinitely
1196
- print("Exception: ", e)
1197
-
1198
-
1199
- # del sample_arrays
1200
- # del sample_arrays1
1201
- # del sample_arrays2
1202
- # del final_array
1203
- # del best_lineup
1204
- st.write('Contest simulation complete')
1205
- # Initial setup
1206
- Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners), columns=FinalPortfolio.columns.tolist() + ['Fantasy'])
1207
- Sim_Winner_Frame['GPP_Proj'] = (Sim_Winner_Frame['Projection'] + Sim_Winner_Frame['Fantasy']) / 2
1208
-
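# Editor's note: GPP_Proj above blends each lineup's static projection with the
# single simulated score that made it a contest winner, so the sort further down
# favors lineups that both project well and showed tournament-winning upside.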
1209
- # Type Casting
1210
- type_cast_dict = {'Salary': int, 'Projection': np.float16, 'Fantasy': np.float16, 'GPP_Proj': np.float16}
1211
- Sim_Winner_Frame = Sim_Winner_Frame.astype(type_cast_dict)
1212
-
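# Editor's note: np.float16 keeps only ~3 significant decimal digits, which is
# fine for a display table but visibly rounds projections; np.float32 would be
# the safer choice if these columns feed any further calculations.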
1213
- # Sorting
1214
- Sim_Winner_Frame = Sim_Winner_Frame.sort_values(by='GPP_Proj', ascending=False)
1215
-
1216
- # Data Copying
1217
- Sim_Winner_Export = Sim_Winner_Frame.copy()
1218
-
1219
- # Conditional Replacement
1220
- columns_to_replace = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST']
1221
-
1222
- if site_var1 == 'Draftkings':
1223
- replace_dict = dkid_dict
1224
- elif site_var1 == 'Fanduel':
1225
- replace_dict = fdid_dict
1226
-
1227
- for col in columns_to_replace:
1228
- Sim_Winner_Export[col] = Sim_Winner_Export[col].replace(replace_dict)
1229
-
1230
-
1231
- player_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,0:9].values, return_counts=True)),
1232
- columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
1233
- player_freq['Freq'] = player_freq['Freq'].astype(int)
1234
- player_freq['Position'] = player_freq['Player'].map(maps_dict['Pos_map'])
1235
- player_freq['Salary'] = player_freq['Player'].map(maps_dict['Salary_map'])
1236
- player_freq['Proj Own'] = player_freq['Player'].map(maps_dict['Own_map']) / 100
1237
- player_freq['Exposure'] = player_freq['Freq']/(Sim_size)
1238
- player_freq['Edge'] = player_freq['Exposure'] - player_freq['Proj Own']
1239
- player_freq['Team'] = player_freq['Player'].map(maps_dict['Team_map'])
1240
- player_freq['Team'] = player_freq['Team'].replace(item_list, team_list)
1242
-
1243
- player_freq = player_freq[['Player', 'Position', 'Team', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
1244
-
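# Editor's sketch: the six position tables below repeat the block above with a
# different column slice each time. A helper of roughly this shape (name and
# signature are illustrative, not app code) would produce the same output:
import numpy as np
import pandas as pd

def exposure_table(lineups, col_idx, maps_dict, item_list, team_list, n_sims):
    players, counts = np.unique(lineups.iloc[:, col_idx].values, return_counts=True)
    tbl = pd.DataFrame({'Player': players, 'Freq': counts.astype(int)})
    tbl['Team'] = tbl['Player'].map(maps_dict['Team_map']).replace(item_list, team_list)
    tbl['Position'] = tbl['Player'].map(maps_dict['Pos_map'])
    tbl['Salary'] = tbl['Player'].map(maps_dict['Salary_map'])
    tbl['Proj Own'] = tbl['Player'].map(maps_dict['Own_map']) / 100
    tbl['Exposure'] = tbl['Freq'] / n_sims
    tbl['Edge'] = tbl['Exposure'] - tbl['Proj Own']
    return tbl.sort_values('Freq', ascending=False)[
        ['Player', 'Team', 'Position', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
# e.g. qb_freq = exposure_table(Sim_Winner_Frame, [0], maps_dict, item_list, team_list, Sim_size)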
1245
- qb_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,0:1].values, return_counts=True)),
1246
- columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
1247
- qb_freq['Freq'] = qb_freq['Freq'].astype(int)
1248
- qb_freq['Position'] = qb_freq['Player'].map(maps_dict['Pos_map'])
1249
- qb_freq['Salary'] = qb_freq['Player'].map(maps_dict['Salary_map'])
1250
- qb_freq['Proj Own'] = qb_freq['Player'].map(maps_dict['Own_map']) / 100
1251
- qb_freq['Exposure'] = qb_freq['Freq']/(Sim_size)
1252
- qb_freq['Edge'] = qb_freq['Exposure'] - qb_freq['Proj Own']
1253
- qb_freq['Team'] = qb_freq['Player'].map(maps_dict['Team_map'])
1254
- qb_freq['Team'] = qb_freq['Team'].replace(item_list, team_list)
1256
-
1257
- qb_freq = qb_freq[['Player', 'Team', 'Position', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
1258
-
1259
- rb_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,[1, 2]].values, return_counts=True)),
1260
- columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
1261
- rb_freq['Freq'] = rb_freq['Freq'].astype(int)
1262
- rb_freq['Position'] = rb_freq['Player'].map(maps_dict['Pos_map'])
1263
- rb_freq['Salary'] = rb_freq['Player'].map(maps_dict['Salary_map'])
1264
- rb_freq['Proj Own'] = rb_freq['Player'].map(maps_dict['Own_map']) / 100
1265
- rb_freq['Exposure'] = rb_freq['Freq']/Sim_size
1266
- rb_freq['Edge'] = rb_freq['Exposure'] - rb_freq['Proj Own']
1267
- rb_freq['Team'] = rb_freq['Player'].map(maps_dict['Team_map'])
1268
- rb_freq['Team'] = rb_freq['Team'].replace(item_list, team_list)
1270
-
1271
- rb_freq = rb_freq[['Player', 'Team', 'Position', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
1272
-
1273
- wr_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,[3, 4, 5]].values, return_counts=True)),
1274
- columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
1275
- wr_freq['Freq'] = wr_freq['Freq'].astype(int)
1276
- wr_freq['Position'] = wr_freq['Player'].map(maps_dict['Pos_map'])
1277
- wr_freq['Salary'] = wr_freq['Player'].map(maps_dict['Salary_map'])
1278
- wr_freq['Proj Own'] = wr_freq['Player'].map(maps_dict['Own_map']) / 100
1279
- wr_freq['Exposure'] = wr_freq['Freq']/Sim_size
1280
- wr_freq['Edge'] = wr_freq['Exposure'] - wr_freq['Proj Own']
1281
- wr_freq['Team'] = wr_freq['Player'].map(maps_dict['Team_map'])
1282
- wr_freq['Team'] = wr_freq['Team'].replace(item_list, team_list)
1284
-
1285
- wr_freq = wr_freq[['Player', 'Team', 'Position', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
1286
-
1287
- te_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,[6]].values, return_counts=True)),
1288
- columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
1289
- te_freq['Freq'] = te_freq['Freq'].astype(int)
1290
- te_freq['Position'] = te_freq['Player'].map(maps_dict['Pos_map'])
1291
- te_freq['Salary'] = te_freq['Player'].map(maps_dict['Salary_map'])
1292
- te_freq['Proj Own'] = te_freq['Player'].map(maps_dict['Own_map']) / 100
1293
- te_freq['Exposure'] = te_freq['Freq']/Sim_size
1294
- te_freq['Edge'] = te_freq['Exposure'] - te_freq['Proj Own']
1295
- te_freq['Team'] = te_freq['Player'].map(maps_dict['Team_map'])
1296
- te_freq['Team'] = te_freq['Team'].replace(item_list, team_list)
1298
-
1299
- te_freq = te_freq[['Player', 'Team', 'Position', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
1300
-
1301
- flex_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,[7]].values, return_counts=True)),
1302
- columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
1303
- flex_freq['Freq'] = flex_freq['Freq'].astype(int)
1304
- flex_freq['Position'] = flex_freq['Player'].map(maps_dict['Pos_map'])
1305
- flex_freq['Salary'] = flex_freq['Player'].map(maps_dict['Salary_map'])
1306
- flex_freq['Proj Own'] = flex_freq['Player'].map(maps_dict['Own_map']) / 100
1307
- flex_freq['Exposure'] = flex_freq['Freq']/Sim_size
1308
- flex_freq['Edge'] = flex_freq['Exposure'] - flex_freq['Proj Own']
1309
- flex_freq['Team'] = flex_freq['Player'].map(maps_dict['Team_map'])
1310
- flex_freq['Team'] = flex_freq['Team'].replace(item_list, team_list)
1312
-
1313
- flex_freq = flex_freq[['Player', 'Team', 'Position', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
1314
-
1315
- dst_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,8:9].values, return_counts=True)),
1316
- columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
1317
- dst_freq['Freq'] = dst_freq['Freq'].astype(int)
1318
- dst_freq['Position'] = dst_freq['Player'].map(maps_dict['Pos_map'])
1319
- dst_freq['Salary'] = dst_freq['Player'].map(maps_dict['Salary_map'])
1320
- dst_freq['Proj Own'] = dst_freq['Player'].map(maps_dict['Own_map']) / 100
1321
- dst_freq['Exposure'] = dst_freq['Freq']/Sim_size
1322
- dst_freq['Edge'] = dst_freq['Exposure'] - dst_freq['Proj Own']
1323
- dst_freq['Team'] = dst_freq['Player'].map(maps_dict['Team_map'])
1324
- dst_freq['Team'] = dst_freq['Team'].replace(item_list, team_list)
1326
-
1327
- dst_freq = dst_freq[['Player', 'Team', 'Position', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
1328
-
1329
- with st.container():
1330
- simulate_container = st.empty()
1331
- st.dataframe(Sim_Winner_Frame.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').background_gradient(cmap='RdYlGn_r', subset=['Own']).format(precision=2), use_container_width = True)
1332
-
1333
- st.download_button(
1334
- label="Export Tables",
1335
- data=convert_df_to_csv(Sim_Winner_Export),
1336
- file_name='NFL_consim_export.csv',
1337
- mime='text/csv',
1338
- )
1339
-
1340
- with st.container():
1341
- freq_container = st.empty()
1342
- tab1, tab2, tab3, tab4, tab5, tab6, tab7 = st.tabs(['Overall Exposures', 'QB Exposures', 'RB Exposures', 'WR Exposures', 'TE Exposures', 'FLEX Exposures', 'DST Exposures'])
1343
- with tab1:
1344
- st.dataframe(player_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
1345
- st.download_button(
1346
- label="Export Exposures",
1347
- data=convert_df_to_csv(player_freq),
1348
- file_name='player_freq_export.csv',
1349
- mime='text/csv',
1350
- )
1351
- with tab2:
1352
- st.dataframe(qb_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
1353
- st.download_button(
1354
- label="Export Exposures",
1355
- data=convert_df_to_csv(qb_freq),
1356
- file_name='qb_freq_export.csv',
1357
- mime='text/csv',
1358
- )
1359
- with tab3:
1360
- st.dataframe(rb_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
1361
- st.download_button(
1362
- label="Export Exposures",
1363
- data=convert_df_to_csv(rb_freq),
1364
- file_name='rb_freq_export.csv',
1365
- mime='text/csv',
1366
- )
1367
- with tab4:
1368
- st.dataframe(wr_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
1369
- st.download_button(
1370
- label="Export Exposures",
1371
- data=convert_df_to_csv(wr_freq),
1372
- file_name='wr_freq_export.csv',
1373
- mime='text/csv',
1374
- )
1375
- with tab5:
1376
- st.dataframe(te_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
1377
- st.download_button(
1378
- label="Export Exposures",
1379
- data=convert_df_to_csv(te_freq),
1380
- file_name='te_freq_export.csv',
1381
- mime='text/csv',
1382
- )
1383
- with tab6:
1384
- st.dataframe(flex_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
1385
- st.download_button(
1386
- label="Export Exposures",
1387
- data=convert_df_to_csv(flex_freq),
1388
- file_name='flex_freq_export.csv',
1389
- mime='text/csv',
1390
- )
1391
- with tab7:
1392
- st.dataframe(dst_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
1393
- st.download_button(
1394
- label="Export Exposures",
1395
- data=convert_df_to_csv(dst_freq),
1396
- file_name='dst_freq_export.csv',
1397
- mime='text/csv',
1398
- )