Multichem committed on
Commit 38b5924 · 1 Parent(s): 458996e

Upload app (3).py

Files changed (1)
  1. app (3).py +1182 -0
app (3).py ADDED
@@ -0,0 +1,1182 @@
1
+ import streamlit as st
2
+ st.set_page_config(layout="wide")
3
+
4
+ for name in dir():
5
+ if not name.startswith('_'):
6
+ del globals()[name]
7
+
8
+ import numpy as np
9
+ import pandas as pd
10
+ import streamlit as st
11
+ import gspread
12
+ import random
13
+ import gc
14
+
15
+ @st.cache_resource
16
+ def init_conn():
17
+ scope = ['https://www.googleapis.com/auth/spreadsheets',
18
+ "https://www.googleapis.com/auth/drive"]
19
+
20
+ credentials = {
21
+ "type": "service_account",
22
+ "project_id": "sheets-api-connect-378620",
23
+ "private_key_id": "1005124050c80d085e2c5b344345715978dd9cc9",
24
+ "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCtKa01beXwc88R\nnPZVQTNPVQuBnbwoOfc66gW3547ja/UEyIGAF112dt/VqHprRafkKGmlg55jqJNt\na4zceLKV+wTm7vBu7lDISTJfGzCf2TrxQYNqwMKE2LOjI69dBM8u4Dcb4k0wcp9v\ntW1ZzLVVuwTvmrg7JBHjiSaB+x5wxm/r3FOiJDXdlAgFlytzqgcyeZMJVKKBQHyJ\njEGg/1720A0numuOCt71w/2G0bDmijuj1e6tH32MwRWcvRNZ19K9ssyDz2S9p68s\nYDhIxX69OWxwScTIHLY6J2t8txf/XMivL/636fPlDADvBEVTdlT606n8CcKUVQeq\npUVdG+lfAgMBAAECggEAP38SUA7B69eTfRpo658ycOs3Amr0JW4H/bb1rNeAul0K\nZhwd/HnU4E07y81xQmey5kN5ZeNrD5EvqkZvSyMJHV0EEahZStwhjCfnDB/cxyix\nZ+kFhv4y9eK+kFpUAhBy5nX6T0O+2T6WvzAwbmbVsZ+X8kJyPuF9m8ldcPlD0sce\ntj8NwVq1ys52eosqs7zi2vjt+eMcaY393l4ls+vNq8Yf27cfyFw45W45CH/97/Nu\n5AmuzlCOAfFF+z4OC5g4rei4E/Qgpxa7/uom+BVfv9G0DIGW/tU6Sne0+37uoGKt\nW6DzhgtebUtoYkG7ZJ05BTXGp2lwgVcNRoPwnKJDxQKBgQDT5wYPUBDW+FHbvZSp\nd1m1UQuXyerqOTA9smFaM8sr/UraeH85DJPEIEk8qsntMBVMhvD3Pw8uIUeFNMYj\naLmZFObsL+WctepXrVo5NB6RtLB/jZYxiKMatMLUJIYtcKIp+2z/YtKiWcLnwotB\nWdCjVnPTxpkurmF2fWP/eewZ+wKBgQDRMtJg7etjvKyjYNQ5fARnCc+XsI3gkBe1\nX9oeXfhyfZFeBXWnZzN1ITgFHplDznmBdxAyYGiQdbbkdKQSghviUQ0igBvoDMYy\n1rWcy+a17Mj98uyNEfmb3X2cC6WpvOZaGHwg9+GY67BThwI3FqHIbyk6Ko09WlTX\nQpRQjMzU7QKBgAfi1iflu+q0LR+3a3vvFCiaToskmZiD7latd9AKk2ocsBd3Woy9\n+hXXecJHPOKV4oUJlJgvAZqe5HGBqEoTEK0wyPNLSQlO/9ypd+0fEnArwFHO7CMF\nycQprAKHJXM1eOOFFuZeQCaInqdPZy1UcV5Szla4UmUZWkk1m24blHzXAoGBAMcA\nyH4qdbxX9AYrC1dvsSRvgcnzytMvX05LU0uF6tzGtG0zVlub4ahvpEHCfNuy44UT\nxRWW/oFFaWjjyFxO5sWggpUqNuHEnRopg3QXx22SRRTGbN45li/+QAocTkgsiRh1\nqEcYZsO4mPCsQqAy6E2p6RcK+Xa+omxvSnVhq0x1AoGAKr8GdkCl4CF6rieLMAQ7\nLNBuuoYGaHoh8l5E2uOQpzwxVy/nMBcAv+2+KqHEzHryUv1owOi6pMLv7A9mTFoS\n18B0QRLuz5fSOsVnmldfC9fpUc6H8cH1SINZpzajqQA74bPwELJjnzrCnH79TnHG\nJuElxA33rFEjbgbzdyrE768=\n-----END PRIVATE KEY-----\n",
25
+ "client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
26
+ "client_id": "106625872877651920064",
27
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
28
+ "token_uri": "https://oauth2.googleapis.com/token",
29
+ "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
30
+ "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
31
+ }
32
+
33
+ gc_con = gspread.service_account_from_dict(credentials)
34
+
35
+ return gc_con
36
+
37
+ gcservice_account = init_conn()
38
+
39
+ freq_format = {'Proj Own': '{:.2%}', 'Exposure': '{:.2%}', 'Edge': '{:.2%}'}
40
+
41
+ @st.cache_resource(ttl = 300)
42
+ def load_dk_player_projections():
43
+ sh = gcservice_account.open_by_url('https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348')
44
+ worksheet = sh.worksheet('DK_ROO')
45
+ load_display = pd.DataFrame(worksheet.get_all_records())
46
+ load_display.replace('', np.nan, inplace=True)
47
+ raw_display = load_display.dropna(subset=['Median'])
48
+
49
+ return raw_display
50
+
51
+ @st.cache_resource(ttl = 300)
52
+ def load_fd_player_projections():
53
+ sh = gcservice_account.open_by_url('https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348')
54
+ worksheet = sh.worksheet('FD_ROO')
55
+ load_display = pd.DataFrame(worksheet.get_all_records())
56
+ load_display.replace('', np.nan, inplace=True)
57
+ raw_display = load_display.dropna(subset=['Median'])
58
+
59
+ return raw_display
60
+
61
+ @st.cache_resource(ttl = 300)
62
+ def set_export_ids():
63
+ sh = gcservice_account.open_by_url('https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348')
64
+ worksheet = sh.worksheet('DK_ROO')
65
+ load_display = pd.DataFrame(worksheet.get_all_records())
66
+ load_display.replace('', np.nan, inplace=True)
67
+ raw_display = load_display.dropna(subset=['Median'])
68
+ dk_ids = dict(zip(raw_display['Player'], raw_display['player_id']))
69
+
70
+ worksheet = sh.worksheet('FD_ROO')
71
+ load_display = pd.DataFrame(worksheet.get_all_records())
72
+ load_display.replace('', np.nan, inplace=True)
73
+ raw_display = load_display.dropna(subset=['Median'])
74
+ fd_ids = dict(zip(raw_display['Player'], raw_display['player_id']))
75
+
76
+ return dk_ids, fd_ids
77
+
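For context, the three cached loaders above all follow the same pattern: open the shared workbook, read a worksheet into a DataFrame, blank out empty strings, and drop rows without a 'Median'. Below is a minimal sketch of the name-to-ID mapping that `set_export_ids` returns, using a toy frame instead of the live sheet (column names mirror the code above; the values are invented).

```python
# Illustrative sketch only: the Player -> player_id mapping built by
# set_export_ids(), shown on a toy frame rather than the live worksheet.
import pandas as pd

toy_roo = pd.DataFrame({
    "Player": ["Player A", "Player B"],
    "player_id": [11111111, 22222222],
    "Median": [21.5, 18.2],
})
toy_roo = toy_roo.dropna(subset=["Median"])  # same filter as the app
dk_ids = dict(zip(toy_roo["Player"], toy_roo["player_id"]))
print(dk_ids)  # {'Player A': 11111111, 'Player B': 22222222}
```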
78
+ dk_roo_raw = load_dk_player_projections()
79
+ fd_roo_raw = load_fd_player_projections()
80
+ t_stamp = f"Last Update: {dk_roo_raw['timestamp'][0]} CST"
81
+ dkid_dict, fdid_dict = set_export_ids()
82
+
83
+ static_exposure = pd.DataFrame(columns=['Player', 'count'])
84
+ overall_exposure = pd.DataFrame(columns=['Player', 'count'])
85
+
86
+ def sim_contest(Sim_size, FinalPortfolio, CleanPortfolio, maps_dict, up_dict, insert_port):
87
+ SimVar = 1
88
+ Sim_Winners = []
89
+ fp_array = FinalPortfolio.values
90
+
91
+ if insert_port == 1:
92
+ up_array = CleanPortfolio.values
93
+
94
+ # Pre-vectorize functions
95
+ vec_projection_map = np.vectorize(maps_dict['Projection_map'].__getitem__)
96
+ vec_stdev_map = np.vectorize(maps_dict['STDev_map'].__getitem__)
97
+
98
+ if insert_port == 1:
99
+ vec_up_projection_map = np.vectorize(up_dict['Projection_map'].__getitem__)
100
+ vec_up_stdev_map = np.vectorize(up_dict['STDev_map'].__getitem__)
101
+
102
+ st.write('Simulating contest on frames')
103
+
104
+ while SimVar <= Sim_size:
105
+ if insert_port == 1:
106
+ fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size-len(CleanPortfolio))]
107
+ elif insert_port == 0:
108
+ fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size)]
109
+
110
+ sample_arrays1 = np.c_[
111
+ fp_random,
112
+ np.sum(np.random.normal(
113
+ loc=vec_projection_map(fp_random[:, :-5]),
114
+ scale=vec_stdev_map(fp_random[:, :-5])),
115
+ axis=1)
116
+ ]
117
+
118
+ if insert_port == 1:
119
+ sample_arrays2 = np.c_[
120
+ up_array,
121
+ np.sum(np.random.normal(
122
+ loc=vec_up_projection_map(up_array[:, :-5]),
123
+ scale=vec_up_stdev_map(up_array[:, :-5])),
124
+ axis=1)
125
+ ]
126
+ sample_arrays = np.vstack((sample_arrays1, sample_arrays2))
127
+ else:
128
+ sample_arrays = sample_arrays1
129
+
130
+ final_array = sample_arrays[sample_arrays[:, 10].argsort()[::-1]]
131
+ best_lineup = final_array[final_array[:, -1].argsort(kind='stable')[::-1][:1]]
132
+ Sim_Winners.append(best_lineup)
133
+ SimVar += 1
134
+
135
+ return Sim_Winners
136
+
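At its core, `sim_contest` repeats one step `Sim_size` times: sample a field of lineups, draw a normally distributed fantasy score for every player from the projection and standard-deviation maps, sum by lineup, and record the highest scorer. A stripped-down sketch of that single step on toy data (player names and numbers are invented):

```python
# Minimal sketch of one sim_contest() iteration on toy data: lineups are
# rows of player names; each player's score is drawn from a normal
# distribution, summed per lineup, and the best row is kept.
import numpy as np

proj = {"QB1": 20.0, "RB1": 15.0}
stdev = {"QB1": 5.0, "RB1": 4.0}
lineups = np.array([["QB1", "RB1"], ["QB1", "RB1"]], dtype=object)

vec_proj = np.vectorize(proj.__getitem__)
vec_sd = np.vectorize(stdev.__getitem__)

scores = np.random.normal(loc=vec_proj(lineups), scale=vec_sd(lineups)).sum(axis=1)
winner = lineups[scores.argmax()]  # the "winning" lineup for this iteration
```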
137
+ def run_seed_frame(seed_depth1, Strength_var, strength_grow, Teams_used, Total_Runs, field_growth):
138
+ RunsVar = 1
139
+ seed_depth_def = seed_depth1
140
+ Strength_var_def = Strength_var
141
+ strength_grow_def = strength_grow
142
+ Teams_used_def = Teams_used
143
+ Total_Runs_def = Total_Runs
144
+
145
+ st.write('Creating Seed Frames')
146
+
147
+ while RunsVar <= seed_depth_def:
148
+ if RunsVar <= 3:
149
+ FieldStrength = Strength_var_def
150
+ FinalPortfolio, maps_dict = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
151
+ FinalPortfolio2, maps_dict2 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
152
+ FinalPortfolio_init = pd.concat([FinalPortfolio, FinalPortfolio2], axis=0)
153
+ maps_dict.update(maps_dict2)
154
+ elif RunsVar > 3 and RunsVar <= 4:
155
+ FieldStrength += (strength_grow_def + ((30 - len(Teams_used_def)) * .001))
156
+ FinalPortfolio3, maps_dict3 = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
157
+ FinalPortfolio4, maps_dict4 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
158
+ FinalPortfolio_merge_3 = pd.concat([FinalPortfolio_init, FinalPortfolio3], axis=0)
159
+ FinalPortfolio_merge_4 = pd.concat([FinalPortfolio_merge_3, FinalPortfolio4], axis=0)
160
+ FinalPortfolio_step_2 = FinalPortfolio_merge_4.drop_duplicates(subset = ['Projection', 'Own'],keep = 'last').reset_index(drop = True)
161
+ maps_dict.update(maps_dict3)
162
+ maps_dict.update(maps_dict4)
163
+ elif RunsVar > 4:
164
+ FieldStrength = 1
165
+ FinalPortfolio5, maps_dict5 = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
166
+ FinalPortfolio6, maps_dict6 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
167
+ FinalPortfolio_merge_5 = pd.concat([FinalPortfolio_step_2, FinalPortfolio5], axis=0)
168
+ FinalPortfolio_merge_6 = pd.concat([FinalPortfolio_merge_5, FinalPortfolio6], axis=0)
169
+ FinalPortfolio_export = FinalPortfolio_merge_6.drop_duplicates(subset = ['Projection', 'Own'],keep = 'last').reset_index(drop = True)
170
+ maps_dict.update(maps_dict5)
171
+ maps_dict.update(maps_dict6)
172
+ RunsVar += 1
173
+
174
+ return FinalPortfolio_export, maps_dict
175
+
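One thing to note when reading `run_seed_frame`: `FinalPortfolio_export` is only assigned in the `RunsVar > 4` branch, so the function relies on `seed_depth1` being at least 5 (the app calls it with 5 further down). A hedged defensive variant, not part of the committed code, would initialise the export frame before the loop:

```python
import pandas as pd

# Hedged sketch (not in the original code): initialising the export frame
# before the while-loop lets shallower seed depths return an empty frame
# instead of raising UnboundLocalError at the return statement.
FinalPortfolio_export = pd.DataFrame()
```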
176
+ def create_stack_options(player_data, wr_var):
177
+ merged_frame = pd.DataFrame(columns = ['QB', 'Player'])
178
+ data_raw = player_data.sort_values(by='Median', ascending=False)
179
+
180
+ for team in data_raw['Team'].unique():
181
+ data_split = data_raw.loc[data_raw['Team'] == team]
182
+ qb_frame = data_split.loc[data_split['Position'] == 'QB'].reset_index()
183
+ wr_frame = data_split.loc[data_split['Position'] == 'WR'].iloc[wr_var-1:wr_var]
184
+ wr_frame['QB'] = qb_frame['Player'][0]
185
+ merge_slice = wr_frame[['QB', 'Player']]
186
+ merged_frame = pd.concat([merged_frame, merge_slice])
187
+ merged_frame = merged_frame.reset_index()
188
+ correl_dict = dict(zip(merged_frame.QB, merged_frame.Player))
189
+
190
+ return correl_dict
191
+
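To make the stacking dictionary concrete, here is a toy illustration of what `create_stack_options` produces for `wr_var = 1` (invented players; the loop below is a simplified equivalent of the function, not the function itself):

```python
# Toy illustration of the QB -> WR stack dict returned by create_stack_options().
import pandas as pd

toy = pd.DataFrame({
    "Player":   ["QB One", "WR Alpha", "WR Beta", "QB Two", "WR Gamma"],
    "Team":     ["AAA",    "AAA",      "AAA",     "BBB",    "BBB"],
    "Position": ["QB",     "WR",       "WR",      "QB",     "WR"],
    "Median":   [20.0,     14.0,       11.0,      18.0,     12.0],
})

stack = {}
for team, grp in toy.sort_values("Median", ascending=False).groupby("Team"):
    qb = grp.loc[grp["Position"] == "QB", "Player"].iloc[0]
    wr = grp.loc[grp["Position"] == "WR", "Player"].iloc[0]  # wr_var = 1 -> top WR
    stack[qb] = wr
print(stack)  # {'QB One': 'WR Alpha', 'QB Two': 'WR Gamma'}
```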
192
+ def create_overall_dfs(pos_players, table_name, dict_name, pos):
193
+ if pos == "FLEX":
194
+ pos_players = pos_players.sort_values(by='Value', ascending=False)
195
+ table_name_raw = pos_players.reset_index(drop=True)
196
+ overall_table_name = table_name_raw.head(round(len(table_name_raw)))
197
+ overall_table_name = overall_table_name.assign(Var = range(0,len(overall_table_name)))
198
+ overall_dict_name = pd.Series(overall_table_name.Player.values, index=overall_table_name.Var).to_dict()
199
+ elif pos != "FLEX":
200
+ table_name_raw = pos_players[pos_players['Position'].str.contains(pos)].reset_index(drop=True)
201
+ overall_table_name = table_name_raw.head(round(len(table_name_raw)))
202
+ overall_table_name = overall_table_name.assign(Var = range(0,len(overall_table_name)))
203
+ overall_dict_name = pd.Series(overall_table_name.Player.values, index=overall_table_name.Var).to_dict()
204
+
205
+ return overall_table_name, overall_dict_name
206
+
207
+
208
+ def get_overall_merged_df():
209
+ ref_dict = {
210
+ 'pos':['RB', 'WR', 'TE', 'FLEX'],
211
+ 'pos_dfs':['RB_Table', 'WR_Table', 'TE_Table', 'FLEX_Table'],
212
+ 'pos_dicts':['rb_dict', 'wr_dict', 'te_dict', 'flex_dict']
213
+ }
214
+
215
+ for i in range(0,4):
216
+ ref_dict['pos_dfs'][i], ref_dict['pos_dicts'][i] =\
217
+ create_overall_dfs(pos_players, ref_dict['pos_dfs'][i], ref_dict['pos_dicts'][i], ref_dict['pos'][i])
218
+
219
+ df_out = pd.concat(ref_dict['pos_dfs'], ignore_index=True)
220
+
221
+ return ref_dict
222
+
223
+ def calculate_range_var(count, min_val, FieldStrength, field_growth):
224
+ var = round(len(count[0]) * FieldStrength)
225
+ var = max(var, min_val)
226
+ var += round(field_growth)
227
+
228
+ return min(var, len(count[0]))
229
+
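A worked example of `calculate_range_var` with invented inputs, tracing the same four steps as the function:

```python
# Worked example: 60 eligible players, FieldStrength 0.5, floor of 20, growth 5.
pool_size, FieldStrength, min_val, field_growth = 60, 0.5, 20, 5

var = round(pool_size * FieldStrength)  # 30 -> sharper fields draw from fewer players
var = max(var, min_val)                 # never below the positional floor (30)
var += round(field_growth)              # widen with contest size (35)
var = min(var, pool_size)               # never beyond the available pool (35)
print(var)  # 35
```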
230
+ def create_random_portfolio(Total_Sample_Size, raw_baselines, field_growth):
231
+
232
+ full_pos_player_dict = get_overall_merged_df()
233
+ qb_baselines = raw_baselines[raw_baselines['Position'] == 'QB']
234
+ qb_baselines = qb_baselines.drop_duplicates(subset='Team')
235
+ max_var = len(qb_baselines[qb_baselines['Position'] == 'QB'])
236
+
237
+ field_growth_rounded = round(field_growth)
238
+ ranges_dict = {}
239
+
240
+ # Calculate ranges
241
+ for df, dict_val, min_val, key in zip(ref_dict['pos_dfs'], ref_dict['pos_dicts'], [10, 20, 10, 30], ['RB', 'WR', 'TE', 'FLEX']):
242
+ count = create_overall_dfs(pos_players, df, dict_val, key)
243
+ ranges_dict[f"{key.lower()}_range"] = calculate_range_var(count, min_val, FieldStrength, field_growth_rounded)
244
+ if max_var <= 10:
245
+ ranges_dict['qb_range'] = round(max_var)
246
+ ranges_dict['dst_range'] = round(max_var)
247
+ elif max_var > 10 and max_var <= 16:
248
+ ranges_dict['qb_range'] = round(max_var / 1.5)
249
+ ranges_dict['dst_range'] = round(max_var)
250
+ elif max_var > 16:
251
+ ranges_dict['qb_range'] = round(max_var / 2)
252
+ ranges_dict['dst_range'] = round(max_var)
253
+
254
+ # Generate random portfolios
255
+ rng = np.random.default_rng()
256
+ total_elements = [1, 2, 3, 1, 1, 1]
257
+ keys = ['qb', 'rb', 'wr', 'te', 'flex', 'dst']
258
+
259
+ all_choices = [rng.choice(ranges_dict[f"{key}_range"], size=(Total_Sample_Size, elem)) for key, elem in zip(keys, total_elements)]
260
+ RandomPortfolio = pd.DataFrame(np.hstack(all_choices), columns=['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST'])
261
+ RandomPortfolio['User/Field'] = 0
262
+
263
+ return RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict
264
+
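Below is a minimal sketch of the index-sampling step inside `create_random_portfolio`: integer indices are drawn per roster slot and stacked into lineup rows, then mapped to player names later. The slot counts mirror the code above (1 QB, 2 RB, 3 WR, 1 TE, 1 FLEX, 1 DST); the range sizes and sample size are invented.

```python
# Sketch of the rng.choice / hstack step that builds raw index lineups.
import numpy as np
import pandas as pd

rng = np.random.default_rng()
ranges_dict = {"qb_range": 8, "rb_range": 30, "wr_range": 40,
               "te_range": 15, "flex_range": 60, "dst_range": 8}
total_elements = [1, 2, 3, 1, 1, 1]
keys = ["qb", "rb", "wr", "te", "flex", "dst"]
n = 5  # illustrative sample size

choices = [rng.choice(ranges_dict[f"{k}_range"], size=(n, e))
           for k, e in zip(keys, total_elements)]
portfolio = pd.DataFrame(np.hstack(choices),
                         columns=["QB", "RB1", "RB2", "WR1", "WR2", "WR3",
                                  "TE", "FLEX", "DST"])
print(portfolio)  # integer indices, later mapped to player names via the pos dicts
```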
265
+ def get_correlated_portfolio_for_sim(Total_Sample_Size, sharp_split, field_growth):
266
+
267
+ sizesplit = round(Total_Sample_Size * sharp_split)
268
+
269
+ RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines, field_growth)
270
+ stack_num = random.randint(1, 3)
271
+ stacking_dict = create_stack_options(raw_baselines, stack_num)
272
+
273
+ RandomPortfolio['QB'] = pd.Series(list(RandomPortfolio['QB'].map(qb_dict)), dtype="string[pyarrow]")
274
+ RandomPortfolio['RB1'] = pd.Series(list(RandomPortfolio['RB1'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
275
+ RandomPortfolio['RB2'] = pd.Series(list(RandomPortfolio['RB2'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
276
+ RandomPortfolio['WR1'] = pd.Series(list(RandomPortfolio['QB'].map(stacking_dict)), dtype="string[pyarrow]")
277
+ RandomPortfolio['WR2'] = pd.Series(list(RandomPortfolio['WR2'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
278
+ RandomPortfolio['WR3'] = pd.Series(list(RandomPortfolio['WR3'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
279
+ RandomPortfolio['TE'] = pd.Series(list(RandomPortfolio['TE'].map(full_pos_player_dict['pos_dicts'][2])), dtype="string[pyarrow]")
280
+ RandomPortfolio['FLEX'] = pd.Series(list(RandomPortfolio['FLEX'].map(full_pos_player_dict['pos_dicts'][3])), dtype="string[pyarrow]")
281
+ RandomPortfolio['DST'] = pd.Series(list(RandomPortfolio['DST'].map(def_dict)), dtype="string[pyarrow]")
282
+ RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
283
+ RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
284
+ RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 10].drop(columns=['plyr_list','plyr_count']).\
285
+ reset_index(drop=True)
286
+
287
+ RandomPortfolio['QBs'] = RandomPortfolio['QB'].map(maps_dict['Salary_map']).astype(np.int32)
288
+ RandomPortfolio['RB1s'] = RandomPortfolio['RB1'].map(maps_dict['Salary_map']).astype(np.int32)
289
+ RandomPortfolio['RB2s'] = RandomPortfolio['RB2'].map(maps_dict['Salary_map']).astype(np.int32)
290
+ RandomPortfolio['WR1s'] = RandomPortfolio['WR1'].map(maps_dict['Salary_map']).astype(np.int32)
291
+ RandomPortfolio['WR2s'] = RandomPortfolio['WR2'].map(maps_dict['Salary_map']).astype(np.int32)
292
+ RandomPortfolio['WR3s'] = RandomPortfolio['WR3'].map(maps_dict['Salary_map']).astype(np.int32)
293
+ RandomPortfolio['TEs'] = RandomPortfolio['TE'].map(maps_dict['Salary_map']).astype(np.int32)
294
+ RandomPortfolio['FLEXs'] = RandomPortfolio['FLEX'].map(maps_dict['Salary_map']).astype(np.int32)
295
+ RandomPortfolio['DSTs'] = RandomPortfolio['DST'].map(maps_dict['Salary_map']).astype(np.int32)
296
+
297
+ RandomPortfolio['QBp'] = RandomPortfolio['QB'].map(maps_dict['Projection_map']).astype(np.float16)
298
+ RandomPortfolio['RB1p'] = RandomPortfolio['RB1'].map(maps_dict['Projection_map']).astype(np.float16)
299
+ RandomPortfolio['RB2p'] = RandomPortfolio['RB2'].map(maps_dict['Projection_map']).astype(np.float16)
300
+ RandomPortfolio['WR1p'] = RandomPortfolio['WR1'].map(maps_dict['Projection_map']).astype(np.float16)
301
+ RandomPortfolio['WR2p'] = RandomPortfolio['WR2'].map(maps_dict['Projection_map']).astype(np.float16)
302
+ RandomPortfolio['WR3p'] = RandomPortfolio['WR3'].map(maps_dict['Projection_map']).astype(np.float16)
303
+ RandomPortfolio['TEp'] = RandomPortfolio['TE'].map(maps_dict['Projection_map']).astype(np.float16)
304
+ RandomPortfolio['FLEXp'] = RandomPortfolio['FLEX'].map(maps_dict['Projection_map']).astype(np.float16)
305
+ RandomPortfolio['DSTp'] = RandomPortfolio['DST'].map(maps_dict['Projection_map']).astype(np.float16)
306
+
307
+ RandomPortfolio['QBo'] = RandomPortfolio['QB'].map(maps_dict['Own_map']).astype(np.float16)
308
+ RandomPortfolio['RB1o'] = RandomPortfolio['RB1'].map(maps_dict['Own_map']).astype(np.float16)
309
+ RandomPortfolio['RB2o'] = RandomPortfolio['RB2'].map(maps_dict['Own_map']).astype(np.float16)
310
+ RandomPortfolio['WR1o'] = RandomPortfolio['WR1'].map(maps_dict['Own_map']).astype(np.float16)
311
+ RandomPortfolio['WR2o'] = RandomPortfolio['WR2'].map(maps_dict['Own_map']).astype(np.float16)
312
+ RandomPortfolio['WR3o'] = RandomPortfolio['WR3'].map(maps_dict['Own_map']).astype(np.float16)
313
+ RandomPortfolio['TEo'] = RandomPortfolio['TE'].map(maps_dict['Own_map']).astype(np.float16)
314
+ RandomPortfolio['FLEXo'] = RandomPortfolio['FLEX'].map(maps_dict['Own_map']).astype(np.float16)
315
+ RandomPortfolio['DSTo'] = RandomPortfolio['DST'].map(maps_dict['Own_map']).astype(np.float16)
316
+
317
+ RandomPortArray = RandomPortfolio.to_numpy()
318
+
319
+ RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,10:19].astype(int))]
320
+ RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,19:28].astype(np.double))]
321
+ RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,28:37].astype(np.double))]
322
+
323
+ RandomPortArrayOut = np.delete(RandomPortArray, np.s_[10:37], axis=1)
324
+ RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut, columns = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own'])
325
+ RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
326
+
327
+ if insert_port == 1:
328
+ CleanPortfolio['Salary'] = sum([CleanPortfolio['QB'].map(maps_dict['Salary_map']),
329
+ CleanPortfolio['RB1'].map(maps_dict['Salary_map']),
330
+ CleanPortfolio['RB2'].map(maps_dict['Salary_map']),
331
+ CleanPortfolio['WR1'].map(maps_dict['Salary_map']),
332
+ CleanPortfolio['WR2'].map(maps_dict['Salary_map']),
333
+ CleanPortfolio['WR3'].map(maps_dict['Salary_map']),
334
+ CleanPortfolio['TE'].map(maps_dict['Salary_map']),
335
+ CleanPortfolio['FLEX'].map(maps_dict['Salary_map']),
336
+ CleanPortfolio['DST'].map(maps_dict['Salary_map'])
337
+ ]).astype(np.int16)
338
+ if insert_port == 1:
339
+ CleanPortfolio['Projection'] = sum([CleanPortfolio['QB'].map(up_dict['Projection_map']),
340
+ CleanPortfolio['RB1'].map(up_dict['Projection_map']),
341
+ CleanPortfolio['RB2'].map(up_dict['Projection_map']),
342
+ CleanPortfolio['WR1'].map(up_dict['Projection_map']),
343
+ CleanPortfolio['WR2'].map(up_dict['Projection_map']),
344
+ CleanPortfolio['WR3'].map(up_dict['Projection_map']),
345
+ CleanPortfolio['TE'].map(up_dict['Projection_map']),
346
+ CleanPortfolio['FLEX'].map(up_dict['Projection_map']),
347
+ CleanPortfolio['DST'].map(up_dict['Projection_map'])
348
+ ]).astype(np.float16)
349
+ if insert_port == 1:
350
+ CleanPortfolio['Own'] = sum([CleanPortfolio['QB'].map(maps_dict['Own_map']),
351
+ CleanPortfolio['RB1'].map(maps_dict['Own_map']),
352
+ CleanPortfolio['RB2'].map(maps_dict['Own_map']),
353
+ CleanPortfolio['WR1'].map(maps_dict['Own_map']),
354
+ CleanPortfolio['WR2'].map(maps_dict['Own_map']),
355
+ CleanPortfolio['WR3'].map(maps_dict['Own_map']),
356
+ CleanPortfolio['TE'].map(maps_dict['Own_map']),
357
+ CleanPortfolio['FLEX'].map(maps_dict['Own_map']),
358
+ CleanPortfolio['DST'].map(maps_dict['Own_map'])
359
+ ]).astype(np.float16)
360
+
361
+ if site_var1 == 'Draftkings':
362
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
363
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (49500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
364
+ elif site_var1 == 'Fanduel':
365
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
366
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (59500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
367
+
368
+ RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
369
+
370
+ RandomPortfolio = RandomPortfolio[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own']]
371
+
372
+ return RandomPortfolio, maps_dict
373
+
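The DraftKings salary window applied above keeps lineups between a fixed cap and a floor that loosens for smaller slates and weaker fields. A worked example with illustrative inputs (an 8-team slate and a FieldStrength of 0.4):

```python
# Worked example of the DraftKings salary window used in the filter above.
teams_on_slate = 8
FieldStrength = 0.4

salary_cap = 50000
salary_floor = (49500 - (5000 * (1 - teams_on_slate / 32))) - (FieldStrength * 1000)
print(salary_floor)  # 45350.0 -> lineups kept between 45,350 and 50,000
```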
374
+ def get_uncorrelated_portfolio_for_sim(Total_Sample_Size, sharp_split, field_growth):
375
+
376
+ sizesplit = round(Total_Sample_Size * (1-sharp_split))
377
+
378
+ RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines, field_growth)
379
+
380
+ RandomPortfolio['QB'] = pd.Series(list(RandomPortfolio['QB'].map(qb_dict)), dtype="string[pyarrow]")
381
+ RandomPortfolio['RB1'] = pd.Series(list(RandomPortfolio['RB1'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
382
+ RandomPortfolio['RB2'] = pd.Series(list(RandomPortfolio['RB2'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
383
+ RandomPortfolio['WR1'] = pd.Series(list(RandomPortfolio['WR1'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
384
+ RandomPortfolio['WR2'] = pd.Series(list(RandomPortfolio['WR2'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
385
+ RandomPortfolio['WR3'] = pd.Series(list(RandomPortfolio['WR3'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
386
+ RandomPortfolio['TE'] = pd.Series(list(RandomPortfolio['TE'].map(full_pos_player_dict['pos_dicts'][2])), dtype="string[pyarrow]")
387
+ RandomPortfolio['FLEX'] = pd.Series(list(RandomPortfolio['FLEX'].map(full_pos_player_dict['pos_dicts'][3])), dtype="string[pyarrow]")
388
+ RandomPortfolio['DST'] = pd.Series(list(RandomPortfolio['DST'].map(def_dict)), dtype="string[pyarrow]")
389
+ RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
390
+ RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
391
+ RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 10].drop(columns=['plyr_list','plyr_count']).\
392
+ reset_index(drop=True)
393
+
394
+ RandomPortfolio['QBs'] = RandomPortfolio['QB'].map(maps_dict['Salary_map']).astype(np.int32)
395
+ RandomPortfolio['RB1s'] = RandomPortfolio['RB1'].map(maps_dict['Salary_map']).astype(np.int32)
396
+ RandomPortfolio['RB2s'] = RandomPortfolio['RB2'].map(maps_dict['Salary_map']).astype(np.int32)
397
+ RandomPortfolio['WR1s'] = RandomPortfolio['WR1'].map(maps_dict['Salary_map']).astype(np.int32)
398
+ RandomPortfolio['WR2s'] = RandomPortfolio['WR2'].map(maps_dict['Salary_map']).astype(np.int32)
399
+ RandomPortfolio['WR3s'] = RandomPortfolio['WR3'].map(maps_dict['Salary_map']).astype(np.int32)
400
+ RandomPortfolio['TEs'] = RandomPortfolio['TE'].map(maps_dict['Salary_map']).astype(np.int32)
401
+ RandomPortfolio['FLEXs'] = RandomPortfolio['FLEX'].map(maps_dict['Salary_map']).astype(np.int32)
402
+ RandomPortfolio['DSTs'] = RandomPortfolio['DST'].map(maps_dict['Salary_map']).astype(np.int32)
403
+
404
+ RandomPortfolio['QBp'] = RandomPortfolio['QB'].map(maps_dict['Projection_map']).astype(np.float16)
405
+ RandomPortfolio['RB1p'] = RandomPortfolio['RB1'].map(maps_dict['Projection_map']).astype(np.float16)
406
+ RandomPortfolio['RB2p'] = RandomPortfolio['RB2'].map(maps_dict['Projection_map']).astype(np.float16)
407
+ RandomPortfolio['WR1p'] = RandomPortfolio['WR1'].map(maps_dict['Projection_map']).astype(np.float16)
408
+ RandomPortfolio['WR2p'] = RandomPortfolio['WR2'].map(maps_dict['Projection_map']).astype(np.float16)
409
+ RandomPortfolio['WR3p'] = RandomPortfolio['WR3'].map(maps_dict['Projection_map']).astype(np.float16)
410
+ RandomPortfolio['TEp'] = RandomPortfolio['TE'].map(maps_dict['Projection_map']).astype(np.float16)
411
+ RandomPortfolio['FLEXp'] = RandomPortfolio['FLEX'].map(maps_dict['Projection_map']).astype(np.float16)
412
+ RandomPortfolio['DSTp'] = RandomPortfolio['DST'].map(maps_dict['Projection_map']).astype(np.float16)
413
+
414
+ RandomPortfolio['QBo'] = RandomPortfolio['QB'].map(maps_dict['Own_map']).astype(np.float16)
415
+ RandomPortfolio['RB1o'] = RandomPortfolio['RB1'].map(maps_dict['Own_map']).astype(np.float16)
416
+ RandomPortfolio['RB2o'] = RandomPortfolio['RB2'].map(maps_dict['Own_map']).astype(np.float16)
417
+ RandomPortfolio['WR1o'] = RandomPortfolio['WR1'].map(maps_dict['Own_map']).astype(np.float16)
418
+ RandomPortfolio['WR2o'] = RandomPortfolio['WR2'].map(maps_dict['Own_map']).astype(np.float16)
419
+ RandomPortfolio['WR3o'] = RandomPortfolio['WR3'].map(maps_dict['Own_map']).astype(np.float16)
420
+ RandomPortfolio['TEo'] = RandomPortfolio['TE'].map(maps_dict['Own_map']).astype(np.float16)
421
+ RandomPortfolio['FLEXo'] = RandomPortfolio['FLEX'].map(maps_dict['Own_map']).astype(np.float16)
422
+ RandomPortfolio['DSTo'] = RandomPortfolio['DST'].map(maps_dict['Own_map']).astype(np.float16)
423
+
424
+ RandomPortArray = RandomPortfolio.to_numpy()
425
+
426
+ RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,10:19].astype(int))]
427
+ RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,19:28].astype(np.double))]
428
+ RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,28:37].astype(np.double))]
429
+
430
+ RandomPortArrayOut = np.delete(RandomPortArray, np.s_[10:37], axis=1)
431
+ RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut, columns = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own'])
432
+ RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
433
+
434
+ if insert_port == 1:
435
+ CleanPortfolio['Salary'] = sum([CleanPortfolio['QB'].map(maps_dict['Salary_map']),
436
+ CleanPortfolio['RB1'].map(maps_dict['Salary_map']),
437
+ CleanPortfolio['RB2'].map(maps_dict['Salary_map']),
438
+ CleanPortfolio['WR1'].map(maps_dict['Salary_map']),
439
+ CleanPortfolio['WR2'].map(maps_dict['Salary_map']),
440
+ CleanPortfolio['WR3'].map(maps_dict['Salary_map']),
441
+ CleanPortfolio['TE'].map(maps_dict['Salary_map']),
442
+ CleanPortfolio['FLEX'].map(maps_dict['Salary_map']),
443
+ CleanPortfolio['DST'].map(maps_dict['Salary_map'])
444
+ ]).astype(np.int16)
445
+ if insert_port == 1:
446
+ CleanPortfolio['Projection'] = sum([CleanPortfolio['QB'].map(up_dict['Projection_map']),
447
+ CleanPortfolio['RB1'].map(up_dict['Projection_map']),
448
+ CleanPortfolio['RB2'].map(up_dict['Projection_map']),
449
+ CleanPortfolio['WR1'].map(up_dict['Projection_map']),
450
+ CleanPortfolio['WR2'].map(up_dict['Projection_map']),
451
+ CleanPortfolio['WR3'].map(up_dict['Projection_map']),
452
+ CleanPortfolio['TE'].map(up_dict['Projection_map']),
453
+ CleanPortfolio['FLEX'].map(up_dict['Projection_map']),
454
+ CleanPortfolio['DST'].map(up_dict['Projection_map'])
455
+ ]).astype(np.float16)
456
+ if insert_port == 1:
457
+ CleanPortfolio['Own'] = sum([CleanPortfolio['QB'].map(maps_dict['Own_map']),
458
+ CleanPortfolio['RB1'].map(maps_dict['Own_map']),
459
+ CleanPortfolio['RB2'].map(maps_dict['Own_map']),
460
+ CleanPortfolio['WR1'].map(maps_dict['Own_map']),
461
+ CleanPortfolio['WR2'].map(maps_dict['Own_map']),
462
+ CleanPortfolio['WR3'].map(maps_dict['Own_map']),
463
+ CleanPortfolio['TE'].map(maps_dict['Own_map']),
464
+ CleanPortfolio['FLEX'].map(maps_dict['Own_map']),
465
+ CleanPortfolio['DST'].map(maps_dict['Own_map'])
466
+ ]).astype(np.float16)
467
+
468
+ if site_var1 == 'Draftkings':
469
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
470
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (49500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
471
+ elif site_var1 == 'Fanduel':
472
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
473
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (59500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
474
+
475
+ RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
476
+
477
+ RandomPortfolio = RandomPortfolio[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own']]
478
+
479
+ return RandomPortfolio, maps_dict
480
+
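Both portfolio builders use the same duplicate-player filter: each row holds nine roster slots plus the `User/Field` flag (0), so a clean lineup has 10 unique values and any repeated player drops it to 9 or fewer. A small sketch on invented data:

```python
# Sketch of the plyr_count == 10 filter: the second lineup repeats "WR B"
# in the FLEX slot, so it is removed.
import pandas as pd

toy = pd.DataFrame({
    "QB": ["QB A", "QB A"], "RB1": ["RB A", "RB A"], "RB2": ["RB B", "RB B"],
    "WR1": ["WR A", "WR A"], "WR2": ["WR B", "WR B"], "WR3": ["WR C", "WR C"],
    "TE": ["TE A", "TE A"], "FLEX": ["RB C", "WR B"],
    "DST": ["DST A", "DST A"], "User/Field": [0, 0],
})
toy["plyr_count"] = toy.apply(lambda row: len(set(row)), axis=1)
print(toy["plyr_count"].tolist())  # [10, 9] -> only the first lineup survives
```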
481
+ tab1, tab2 = st.tabs(['Uploads', 'Contest Sim'])
482
+
483
+ with tab1:
484
+ with st.container():
485
+ col1, col2 = st.columns([3, 3])
486
+
487
+ with col1:
488
+ st.info("The Projections file can have any columns in any order, but must contain columns explicitly named: 'Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', and 'Own'. Upload your projections first to avoid an error message.")
489
+ proj_file = st.file_uploader("Upload Projections File", key = 'proj_uploader')
490
+
491
+ if proj_file is not None:
492
+ try:
493
+ proj_dataframe = pd.read_csv(proj_file)
494
+ proj_dataframe = proj_dataframe.dropna(subset='Median')
495
+ proj_dataframe['Player'] = proj_dataframe['Player'].str.strip()
496
+ try:
497
+ proj_dataframe['Own'] = proj_dataframe['Own'].str.strip('%').astype(float)
498
+ except:
499
+ pass
500
+
501
+ except:
502
+ proj_dataframe = pd.read_excel(proj_file)
503
+ proj_dataframe = proj_dataframe.dropna(subset='Median')
504
+ proj_dataframe['Player'] = proj_dataframe['Player'].str.strip()
505
+ try:
506
+ proj_dataframe['Own'] = proj_dataframe['Own'].str.strip('%').astype(float)
507
+ except:
508
+ pass
509
+ st.table(proj_dataframe.head(10))
510
+ player_salary_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Salary))
511
+ player_proj_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Median))
512
+ player_own_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Own))
513
+
514
+ with col2:
515
+ st.info("The Portfolio file must contain only the following columns, in order and explicitly named: 'QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', and 'DST'. Upload your projections first to avoid an error message.")
516
+ portfolio_file = st.file_uploader("Upload Portfolio File", key = 'portfolio_uploader')
517
+
518
+ if portfolio_file is not None:
519
+ try:
520
+ portfolio_dataframe = pd.read_csv(portfolio_file)
521
+
522
+ except:
523
+ portfolio_dataframe = pd.read_excel(portfolio_file)
524
+
525
+ try:
526
+ try:
527
+ portfolio_dataframe.columns=["QB", "RB1", "RB2", "WR1", "WR2", "WR3", "TE", "FLEX", "DST"]
528
+ split_portfolio = portfolio_dataframe
529
+ split_portfolio[['QB', 'QB_ID']] = split_portfolio.QB.str.split("(", n=1, expand = True)
530
+ split_portfolio[['RB1', 'RB1_ID']] = split_portfolio.RB1.str.split("(", n=1, expand = True)
531
+ split_portfolio[['RB2', 'RB2_ID']] = split_portfolio.RB2.str.split("(", n=1, expand = True)
532
+ split_portfolio[['WR1', 'WR1_ID']] = split_portfolio.WR1.str.split("(", n=1, expand = True)
533
+ split_portfolio[['WR2', 'WR2_ID']] = split_portfolio.WR2.str.split("(", n=1, expand = True)
534
+ split_portfolio[['WR3', 'WR3_ID']] = split_portfolio.WR3.str.split("(", n=1, expand = True)
535
+ split_portfolio[['TE', 'TE_ID']] = split_portfolio.TE.str.split("(", n=1, expand = True)
536
+ split_portfolio[['FLEX', 'FLEX_ID']] = split_portfolio.FLEX.str.split("(", n=1, expand = True)
537
+ split_portfolio[['DST', 'DST_ID']] = split_portfolio.DST.str.split("(", n=1, expand = True)
538
+
539
+ split_portfolio['QB'] = split_portfolio['QB'].str.strip()
540
+ split_portfolio['RB1'] = split_portfolio['RB1'].str.strip()
541
+ split_portfolio['RB2'] = split_portfolio['RB2'].str.strip()
542
+ split_portfolio['WR1'] = split_portfolio['WR1'].str.strip()
543
+ split_portfolio['WR2'] = split_portfolio['WR2'].str.strip()
544
+ split_portfolio['WR3'] = split_portfolio['WR3'].str.strip()
545
+ split_portfolio['TE'] = split_portfolio['TE'].str.strip()
546
+ split_portfolio['FLEX'] = split_portfolio['FLEX'].str.strip()
547
+ split_portfolio['DST'] = split_portfolio['DST'].str.strip()
548
+
549
+ st.table(split_portfolio.head(10))
550
+
551
+ split_portfolio['Salary'] = sum([split_portfolio['QB'].map(player_salary_dict),
552
+ split_portfolio['RB1'].map(player_salary_dict),
553
+ split_portfolio['RB2'].map(player_salary_dict),
554
+ split_portfolio['WR1'].map(player_salary_dict),
555
+ split_portfolio['WR2'].map(player_salary_dict),
556
+ split_portfolio['WR3'].map(player_salary_dict),
557
+ split_portfolio['TE'].map(player_salary_dict),
558
+ split_portfolio['FLEX'].map(player_salary_dict),
559
+ split_portfolio['DST'].map(player_salary_dict)])
560
+
561
+ split_portfolio['Projection'] = sum([split_portfolio['QB'].map(player_proj_dict),
562
+ split_portfolio['RB1'].map(player_proj_dict),
563
+ split_portfolio['RB2'].map(player_proj_dict),
564
+ split_portfolio['WR1'].map(player_proj_dict),
565
+ split_portfolio['WR2'].map(player_proj_dict),
566
+ split_portfolio['WR3'].map(player_proj_dict),
567
+ split_portfolio['TE'].map(player_proj_dict),
568
+ split_portfolio['FLEX'].map(player_proj_dict),
569
+ split_portfolio['DST'].map(player_proj_dict)])
570
+
571
+ split_portfolio['Ownership'] = sum([split_portfolio['QB'].map(player_own_dict),
572
+ split_portfolio['RB1'].map(player_own_dict),
573
+ split_portfolio['RB2'].map(player_own_dict),
574
+ split_portfolio['WR1'].map(player_own_dict),
575
+ split_portfolio['WR2'].map(player_own_dict),
576
+ split_portfolio['WR3'].map(player_own_dict),
577
+ split_portfolio['TE'].map(player_own_dict),
578
+ split_portfolio['FLEX'].map(player_own_dict),
579
+ split_portfolio['DST'].map(player_own_dict)])
580
+
581
+
582
+ except:
583
+ portfolio_dataframe.columns=["QB", "RB1", "RB2", "WR1", "WR2", "WR3", "TE", "FLEX", "DST"]
584
+
585
+ split_portfolio = portfolio_dataframe
586
+ split_portfolio[['QB_ID', 'QB']] = split_portfolio.QB.str.split(":", n=1, expand = True)
587
+ split_portfolio[['RB1_ID', 'RB1']] = split_portfolio.RB1.str.split(":", n=1, expand = True)
588
+ split_portfolio[['RB2_ID', 'RB2']] = split_portfolio.RB2.str.split(":", n=1, expand = True)
589
+ split_portfolio[['WR1_ID', 'WR1']] = split_portfolio.WR1.str.split(":", n=1, expand = True)
590
+ split_portfolio[['WR2_ID', 'WR2']] = split_portfolio.WR2.str.split(":", n=1, expand = True)
591
+ split_portfolio[['WR3_ID', 'WR3']] = split_portfolio.WR3.str.split(":", n=1, expand = True)
592
+ split_portfolio[['TE_ID', 'TE']] = split_portfolio.TE.str.split(":", n=1, expand = True)
593
+ split_portfolio[['FLEX_ID', 'FLEX']] = split_portfolio.FLEX.str.split(":", n=1, expand = True)
594
+ split_portfolio[['DST_ID', 'DST']] = split_portfolio.DST.str.split(":", n=1, expand = True)
595
+
596
+ split_portfolio['QB'] = split_portfolio['QB'].str.strip()
597
+ split_portfolio['RB1'] = split_portfolio['RB1'].str.strip()
598
+ split_portfolio['RB2'] = split_portfolio['RB2'].str.strip()
599
+ split_portfolio['WR1'] = split_portfolio['WR1'].str.strip()
600
+ split_portfolio['WR2'] = split_portfolio['WR2'].str.strip()
601
+ split_portfolio['WR3'] = split_portfolio['WR3'].str.strip()
602
+ split_portfolio['TE'] = split_portfolio['TE'].str.strip()
603
+ split_portfolio['FLEX'] = split_portfolio['FLEX'].str.strip()
604
+ split_portfolio['DST'] = split_portfolio['DST'].str.strip()
605
+
606
+ split_portfolio['Salary'] = sum([split_portfolio['QB'].map(player_salary_dict),
607
+ split_portfolio['RB1'].map(player_salary_dict),
608
+ split_portfolio['RB2'].map(player_salary_dict),
609
+ split_portfolio['WR1'].map(player_salary_dict),
610
+ split_portfolio['WR2'].map(player_salary_dict),
611
+ split_portfolio['WR3'].map(player_salary_dict),
612
+ split_portfolio['TE'].map(player_salary_dict),
613
+ split_portfolio['FLEX'].map(player_salary_dict),
614
+ split_portfolio['DST'].map(player_salary_dict)])
615
+
616
+ split_portfolio['Projection'] = sum([split_portfolio['QB'].map(player_proj_dict),
617
+ split_portfolio['RB1'].map(player_proj_dict),
618
+ split_portfolio['RB2'].map(player_proj_dict),
619
+ split_portfolio['WR1'].map(player_proj_dict),
620
+ split_portfolio['WR2'].map(player_proj_dict),
621
+ split_portfolio['WR3'].map(player_proj_dict),
622
+ split_portfolio['TE'].map(player_proj_dict),
623
+ split_portfolio['FLEX'].map(player_proj_dict),
624
+ split_portfolio['DST'].map(player_proj_dict)])
625
+
626
+ st.table(split_portfolio.head(10))
627
+ split_portfolio['Ownership'] = sum([split_portfolio['QB'].map(player_own_dict),
628
+ split_portfolio['RB1'].map(player_own_dict),
629
+ split_portfolio['RB2'].map(player_own_dict),
630
+ split_portfolio['WR1'].map(player_own_dict),
631
+ split_portfolio['WR2'].map(player_own_dict),
632
+ split_portfolio['WR3'].map(player_own_dict),
633
+ split_portfolio['TE'].map(player_own_dict),
634
+ split_portfolio['FLEX'].map(player_own_dict),
635
+ split_portfolio['DST'].map(player_own_dict)])
636
+
637
+ except:
638
+ split_portfolio = portfolio_dataframe
639
+
640
+ split_portfolio['Salary'] = sum([split_portfolio['QB'].map(player_salary_dict),
641
+ split_portfolio['RB1'].map(player_salary_dict),
642
+ split_portfolio['RB2'].map(player_salary_dict),
643
+ split_portfolio['WR1'].map(player_salary_dict),
644
+ split_portfolio['WR2'].map(player_salary_dict),
645
+ split_portfolio['WR3'].map(player_salary_dict),
646
+ split_portfolio['TE'].map(player_salary_dict),
647
+ split_portfolio['FLEX'].map(player_salary_dict),
648
+ split_portfolio['DST'].map(player_salary_dict)])
649
+
650
+ split_portfolio['Projection'] = sum([split_portfolio['QB'].map(player_proj_dict),
651
+ split_portfolio['RB1'].map(player_proj_dict),
652
+ split_portfolio['RB2'].map(player_proj_dict),
653
+ split_portfolio['WR1'].map(player_proj_dict),
654
+ split_portfolio['WR2'].map(player_proj_dict),
655
+ split_portfolio['WR3'].map(player_proj_dict),
656
+ split_portfolio['TE'].map(player_proj_dict),
657
+ split_portfolio['FLEX'].map(player_proj_dict),
658
+ split_portfolio['DST'].map(player_proj_dict)])
659
+
660
+ split_portfolio['Ownership'] = sum([split_portfolio['QB'].map(player_own_dict),
661
+ split_portfolio['RB1'].map(player_own_dict),
662
+ split_portfolio['RB2'].map(player_own_dict),
663
+ split_portfolio['WR1'].map(player_own_dict),
664
+ split_portfolio['WR2'].map(player_own_dict),
665
+ split_portfolio['WR3'].map(player_own_dict),
666
+ split_portfolio['TE'].map(player_own_dict),
667
+ split_portfolio['FLEX'].map(player_own_dict),
668
+ split_portfolio['DST'].map(player_own_dict)])
669
+
670
+ gc.collect()
671
+
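The upload messages earlier in this tab spell out the required columns for both files. A hedged illustration of what the two uploads are expected to look like (column names come from the `st.info` text above; the rows here are invented):

```python
# Illustrative layouts for the projections and portfolio uploads.
import pandas as pd

projections_example = pd.DataFrame([{
    "Player": "Player A", "Salary": 7800, "Position": "QB",
    "Team": "AAA", "Opp": "BBB", "Median": 21.4, "Own": "12%",
}])

portfolio_example = pd.DataFrame([{
    "QB": "Player A", "RB1": "Player B", "RB2": "Player C",
    "WR1": "Player D", "WR2": "Player E", "WR3": "Player F",
    "TE": "Player G", "FLEX": "Player H", "DST": "Team I",
}])
```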
672
+ with tab2:
673
+ col1, col2 = st.columns([1, 7])
674
+ with col1:
675
+ st.info(t_stamp)
676
+ if st.button("Load/Reset Data", key='reset1'):
677
+ st.cache_data.clear()
678
+ for key in st.session_state.keys():
679
+ del st.session_state[key]
680
+ dk_roo_raw = load_dk_player_projections()
681
+ fd_roo_raw = load_fd_player_projections()
682
+ t_stamp = f"Last Update: {dk_roo_raw['timestamp'][0]} CST"
683
+ dkid_dict, fdid_dict = set_export_ids()
684
+
685
+ slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Thurs-Mon Slate', 'User'))
686
+ site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'))
687
+ if site_var1 == 'Draftkings':
688
+ if slate_var1 == 'User':
689
+ raw_baselines = proj_dataframe[['Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', 'Own']]
690
+ elif slate_var1 != 'User':
691
+ raw_baselines = dk_roo_raw[dk_roo_raw['slate'] == str(slate_var1)]
692
+ raw_baselines = raw_baselines[raw_baselines['version'] == 'overall']
693
+ elif site_var1 == 'Fanduel':
694
+ if slate_var1 == 'User':
695
+ raw_baselines = proj_dataframe
696
+ elif slate_var1 != 'User':
697
+ raw_baselines = fd_roo_raw[fd_roo_raw['slate'] == str(slate_var1)]
698
+ raw_baselines = raw_baselines[raw_baselines['version'] == 'overall']
699
+
700
+ st.info("If you are uploading a portfolio, note that there are adjustments to projections and deviation mapping to prevent 'Projection Bias' and create a fair simulation")
701
+ insert_port1 = st.selectbox("Are you uploading a portfolio?", ('No', 'Yes'), key='insert_port1')
702
+ if insert_port1 == 'Yes':
703
+ insert_port = 1
704
+ elif insert_port1 == 'No':
705
+ insert_port = 0
706
+ contest_var1 = st.selectbox("What contest size are you simulating?", ('Small', 'Medium', 'Large'))
707
+ if contest_var1 == 'Small':
708
+ Contest_Size = 1000
709
+ elif contest_var1 == 'Medium':
710
+ Contest_Size = 5000
711
+ elif contest_var1 == 'Large':
712
+ Contest_Size = 10000
713
+ strength_var1 = st.selectbox("How sharp is the field in the contest?", ('Not Very', 'Average', 'Very'))
714
+ if strength_var1 == 'Not Very':
715
+ sharp_split = .33
716
+ Strength_var = .50
717
+ scaling_var = 5
718
+ elif strength_var1 == 'Average':
719
+ sharp_split = .50
720
+ Strength_var = .25
721
+ scaling_var = 10
722
+ elif strength_var1 == 'Very':
723
+ sharp_split = .75
724
+ Strength_var = .01
725
+ scaling_var = 15
726
+
727
+ Sort_function = 'Median'
728
+ Sim_function = 'Projection'
729
+
730
+ if Contest_Size <= 1000:
731
+ strength_grow = .01
732
+ elif Contest_Size > 1000 and Contest_Size <= 2500:
733
+ strength_grow = .025
734
+ elif Contest_Size > 2500 and Contest_Size <= 5000:
735
+ strength_grow = .05
736
+ elif Contest_Size > 5000 and Contest_Size <= 20000:
737
+ strength_grow = .075
738
+ elif Contest_Size > 20000:
739
+ strength_grow = .1
740
+
741
+ field_growth = 100 * strength_grow
742
+
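A worked example of how the settings above combine: a 'Medium' contest maps to a 5,000-entry field, which falls in the 2,500 to 5,000 bracket, so `strength_grow` is 0.05 and the seed frames widen by `field_growth = 5`.

```python
# Worked mapping of the contest settings selected in the sidebar above.
Contest_Size = 5000
strength_grow = 0.05              # 2500 < Contest_Size <= 5000 branch
field_growth = 100 * strength_grow
print(field_growth)  # 5.0
```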
743
+ with col2:
744
+ with st.container():
745
+ if st.button("Simulate Contest"):
746
+ with st.container():
747
+ for key in st.session_state.keys():
748
+ del st.session_state[key]
749
+
750
+ if slate_var1 == 'User':
751
+ initial_proj = proj_dataframe[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
752
+
753
+ # Define the calculation to be applied
754
+ def calculate_own(position, own, mean_own, factor, max_own=75):
755
+ return np.where((position == 'QB') & (own - mean_own >= 0),
756
+ own * (factor * (own - mean_own) / 100) + mean_own,
757
+ own)
758
+
759
+ # Set the factors based on the contest_var1
760
+ factor_qb, factor_other = {
761
+ 'Small': (10, 5),
762
+ 'Medium': (6, 3),
763
+ 'Large': (3, 1.5),
764
+ }[contest_var1]
765
+
766
+ # Apply the calculation to the DataFrame
767
+ initial_proj['Own%'] = initial_proj.apply(lambda row: calculate_own(row['Position'], row['Own'], initial_proj.loc[initial_proj['Position'] == row['Position'], 'Own'].mean(), factor_qb if row['Position'] == 'QB' else factor_other), axis=1)
768
+ initial_proj['Own%'] = initial_proj['Own%'].clip(upper=75)
769
+ initial_proj['Own'] = initial_proj['Own%'] * (900 / initial_proj['Own%'].sum())
770
+
771
+ # Drop unnecessary columns and create the final DataFrame
772
+ Overall_Proj = initial_proj[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
773
+
774
+ elif slate_var1 != 'User':
775
+ # Copy only the necessary columns
776
+ initial_proj = raw_baselines[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
777
+
778
+ # Define the calculation to be applied
779
+ def calculate_own(position, own, mean_own, factor, max_own=75):
780
+ return np.where((position == 'QB') & (own - mean_own >= 0),
781
+ own * (factor * (own - mean_own) / 100) + mean_own,
782
+ own)
783
+
784
+ # Set the factors based on the contest_var1
785
+ factor_qb, factor_other = {
786
+ 'Small': (10, 5),
787
+ 'Medium': (6, 3),
788
+ 'Large': (3, 1.5),
789
+ }[contest_var1]
790
+
791
+ # Apply the calculation to the DataFrame
792
+ initial_proj['Own%'] = initial_proj.apply(lambda row: calculate_own(row['Position'], row['Own'], initial_proj.loc[initial_proj['Position'] == row['Position'], 'Own'].mean(), factor_qb if row['Position'] == 'QB' else factor_other), axis=1)
793
+ initial_proj['Own%'] = initial_proj['Own%'].clip(upper=75)
794
+ initial_proj['Own'] = initial_proj['Own%'] * (900 / initial_proj['Own%'].sum())
795
+
796
+ # Drop unnecessary columns and create the final DataFrame
797
+ Overall_Proj = initial_proj[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
798
+
799
+ if insert_port == 1:
800
+ UserPortfolio = portfolio_dataframe[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST']]
801
+ elif insert_port == 0:
802
+ UserPortfolio = pd.DataFrame(columns = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST'])
803
+
804
+ Overall_Proj.replace('', np.nan, inplace=True)
805
+ Overall_Proj = Overall_Proj.dropna(subset=['Median'])
806
+ Overall_Proj = Overall_Proj.assign(Value=lambda x: (x.Median / (x.Salary / 1000)))
807
+ Overall_Proj['Sort_var'] = (Overall_Proj['Median'].rank(ascending=False) + Overall_Proj['Value'].rank(ascending=False)) / 2
808
+ Overall_Proj = Overall_Proj.sort_values(by='Sort_var', ascending=False)
809
+ Overall_Proj['Own'] = np.where((Overall_Proj['Median'] > 0) & (Overall_Proj['Own'] == 0), 1, Overall_Proj['Own'])
810
+ Overall_Proj = Overall_Proj.loc[Overall_Proj['Own'] > 0]
811
+
812
+ Overall_Proj['Floor'] = np.where(Overall_Proj['Position'] == 'QB', Overall_Proj['Median'] * .5, Overall_Proj['Median'] * .25)
813
+ Overall_Proj['Ceiling'] = np.where(Overall_Proj['Position'] == 'WR', Overall_Proj['Median'] + Overall_Proj['Median'], Overall_Proj['Median'] + Overall_Proj['Floor'])
814
+ Overall_Proj['STDev'] = Overall_Proj['Median'] / 4
815
+
816
+ Teams_used = Overall_Proj['Team'].drop_duplicates().reset_index(drop=True)
817
+ Teams_used = Teams_used.reset_index()
818
+ Teams_used['team_item'] = Teams_used['index'] + 1
819
+ Teams_used = Teams_used.drop(columns=['index'])
820
+ Teams_used_dictraw = Teams_used.drop(columns=['team_item'])
821
+
822
+ team_list = Teams_used['Team'].to_list()
823
+ item_list = Teams_used['team_item'].to_list()
824
+
825
+ FieldStrength_raw = Strength_var + ((30 - len(Teams_used)) * .01)
826
+ FieldStrength = FieldStrength_raw - (FieldStrength_raw * (20000 / Contest_Size))
827
+
828
+ if FieldStrength < 0:
829
+ FieldStrength = Strength_var
830
+ field_split = Strength_var
831
+
832
+ for checkVar in range(len(team_list)):
833
+ Overall_Proj['Team'] = Overall_Proj['Team'].replace(team_list, item_list)
834
+
835
+ qbs_raw = Overall_Proj[Overall_Proj.Position == 'QB']
836
+ qbs_raw = qbs_raw.dropna(subset=['Median']).reset_index(drop=True)
837
+ qbs_raw = qbs_raw.reset_index(drop=True)
838
+ qbs_raw = qbs_raw.sort_values(by=['Median'], ascending=False)
839
+
840
+ qbs = qbs_raw.head(round(len(qbs_raw)))
841
+ qbs = qbs.assign(Var = range(0,len(qbs)))
842
+ qb_dict = pd.Series(qbs.Player.values, index=qbs.Var).to_dict()
843
+
844
+ defs_raw = Overall_Proj[Overall_Proj.Position.str.contains("D")]
845
+ defs_raw = defs_raw.dropna(subset=['Median']).reset_index(drop=True)
846
+ defs_raw = defs_raw.reset_index(drop=True)
847
+ defs_raw = defs_raw.sort_values(by=['Own', 'Value'], ascending=False)
848
+
849
+ defs = defs_raw.head(round(len(defs_raw)))
850
+ defs = defs.assign(Var = range(0,len(defs)))
851
+ def_dict = pd.Series(defs.Player.values, index=defs.Var).to_dict()
852
+
853
+ rbs_raw = Overall_Proj[Overall_Proj.Position == 'RB']
854
+ rbs_raw = rbs_raw.dropna(subset=['Median']).reset_index(drop=True)
855
+ rbs_raw = rbs_raw.reset_index(drop=True)
856
+ rbs_raw = rbs_raw.sort_values(by=['Own', 'Value'], ascending=False)
857
+
858
+ wrs_raw = Overall_Proj[Overall_Proj.Position == 'WR']
859
+ wrs_raw = wrs_raw.dropna(subset=['Median']).reset_index(drop=True)
860
+ wrs_raw = wrs_raw.reset_index(drop=True)
861
+ wrs_raw = wrs_raw.sort_values(by=['Own', 'Median'], ascending=False)
862
+
863
+ tes_raw = Overall_Proj[Overall_Proj.Position == 'TE']
864
+ tes_raw = tes_raw.dropna(subset=['Median']).reset_index(drop=True)
865
+ tes_raw = tes_raw.reset_index(drop=True)
866
+ tes_raw = tes_raw.sort_values(by=['Own', 'Value'], ascending=False)
867
+
868
+ pos_players = pd.concat([rbs_raw, wrs_raw, tes_raw])
869
+ pos_players = pos_players.dropna(subset=['Median']).reset_index(drop=True)
870
+ pos_players = pos_players.reset_index(drop=True)
871
+
872
+ if insert_port == 1:
873
+ try:
874
+ # Initialize an empty DataFrame for Raw Portfolio
875
+ Raw_Portfolio = pd.DataFrame()
876
+
877
+ # Loop through each position and split the data accordingly
878
+ positions = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST']
879
+ for pos in positions:
880
+ temp_df = UserPortfolio[pos].str.split("(", n=1, expand=True)
881
+ temp_df.columns = [pos, 'Drop']
882
+ Raw_Portfolio = pd.concat([Raw_Portfolio, temp_df], axis=1)
883
+
884
+ # Select only necessary columns and strip white spaces
885
+ CleanPortfolio = Raw_Portfolio[positions].apply(lambda x: x.str.strip())
886
+ CleanPortfolio.reset_index(inplace=True)
887
+ CleanPortfolio['User/Field'] = CleanPortfolio['index'] + 1
888
+ CleanPortfolio.drop(columns=['index'], inplace=True)
889
+
890
+ CleanPortfolio.replace('', np.nan, inplace=True)
891
+ CleanPortfolio.dropna(subset=['QB'], inplace=True)
892
+
893
+ # Create frequency table for players
894
+ cleaport_players = pd.DataFrame(
895
+ np.column_stack(np.unique(CleanPortfolio.iloc[:, 0:9].values, return_counts=True)),
896
+ columns=['Player', 'Freq']
897
+ ).sort_values('Freq', ascending=False).reset_index(drop=True)
898
+ cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
899
+
900
+ # Merge and update nerf_frame
901
+ nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
902
+ for col in ['Median', 'Floor', 'Ceiling', 'STDev']:
903
+ nerf_frame[col] *= 0.90
904
+ except:
905
+ CleanPortfolio = UserPortfolio.reset_index()
906
+ CleanPortfolio['User/Field'] = CleanPortfolio['index'] + 1
907
+ CleanPortfolio.drop(columns=['index'], inplace=True)
908
+
909
+ # Replace empty strings and drop rows with NaN in 'QB' column
910
+ CleanPortfolio.replace('', np.nan, inplace=True)
911
+ CleanPortfolio.dropna(subset=['QB'], inplace=True)
912
+
913
+ # Create frequency table for players
914
+ cleaport_players = pd.DataFrame(
915
+ np.column_stack(np.unique(CleanPortfolio.iloc[:, 0:9].values, return_counts=True)),
916
+ columns=['Player', 'Freq']
917
+ ).sort_values('Freq', ascending=False).reset_index(drop=True)
918
+ cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
919
+
920
+ # Merge and update nerf_frame
921
+ nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
922
+ for col in ['Median', 'Floor', 'Ceiling', 'STDev']:
923
+ nerf_frame[col] *= 0.90
924
+
925
+ elif insert_port == 0:
926
+ CleanPortfolio = UserPortfolio
927
+ cleaport_players = pd.DataFrame(np.column_stack(np.unique(CleanPortfolio.iloc[:,0:9].values, return_counts=True)),
928
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
929
+ cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
930
+ nerf_frame = Overall_Proj
931
+
932
+ ref_dict = {
933
+ 'pos':['RB', 'WR', 'TE', 'FLEX'],
934
+ 'pos_dfs':['RB_Table', 'WR_Table', 'TE_Table', 'FLEX_Table'],
935
+ 'pos_dicts':['rb_dict', 'wr_dict', 'te_dict', 'flex_dict']
936
+ }
937
+
938
+ maps_dict = {
939
+ 'Floor_map':dict(zip(Overall_Proj.Player,Overall_Proj.Floor)),
940
+ 'Projection_map':dict(zip(Overall_Proj.Player,Overall_Proj.Median)),
941
+ 'Ceiling_map':dict(zip(Overall_Proj.Player,Overall_Proj.Ceiling)),
942
+ 'Salary_map':dict(zip(Overall_Proj.Player,Overall_Proj.Salary)),
943
+ 'Pos_map':dict(zip(Overall_Proj.Player,Overall_Proj.Position)),
944
+ 'Own_map':dict(zip(Overall_Proj.Player,Overall_Proj.Own)),
945
+ 'Team_map':dict(zip(Overall_Proj.Player,Overall_Proj.Team)),
946
+ 'STDev_map':dict(zip(Overall_Proj.Player,Overall_Proj.STDev)),
947
+ 'team_check_map':dict(zip(Overall_Proj.Player,Overall_Proj.Team))
948
+ }
949
+
950
+ up_dict = {
951
+ 'Floor_map':dict(zip(cleaport_players.Player,nerf_frame.Floor)),
952
+ 'Projection_map':dict(zip(cleaport_players.Player,nerf_frame.Median)),
953
+ 'Ceiling_map':dict(zip(cleaport_players.Player,nerf_frame.Ceiling)),
954
+ 'Salary_map':dict(zip(cleaport_players.Player,nerf_frame.Salary)),
955
+ 'Pos_map':dict(zip(cleaport_players.Player,nerf_frame.Position)),
956
+ 'Own_map':dict(zip(cleaport_players.Player,nerf_frame.Own)),
957
+ 'Team_map':dict(zip(cleaport_players.Player,nerf_frame.Team)),
958
+ 'STDev_map':dict(zip(cleaport_players.Player,nerf_frame.STDev)),
959
+ 'team_check_map':dict(zip(cleaport_players.Player,nerf_frame.Team))
960
+ }
961
+
962
+ FinalPortfolio, maps_dict = run_seed_frame(5, Strength_var, strength_grow, Teams_used, 1000000, field_growth)
963
+
+ Sim_Winners = sim_contest(2500, FinalPortfolio, CleanPortfolio, maps_dict, up_dict, insert_port)
+
+ # Combine the simulation winners into one frame, blend projection with simulated score,
+ # and count how often each unique lineup won
+ Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners), columns=FinalPortfolio.columns.tolist() + ['Fantasy'])
+ Sim_Winner_Frame['GPP_Proj'] = (Sim_Winner_Frame['Projection'] + Sim_Winner_Frame['Fantasy']) / 2
+ Sim_Winner_Frame['unique_id'] = Sim_Winner_Frame['Projection'].astype(str) + Sim_Winner_Frame['Salary'].astype(str) + Sim_Winner_Frame['Own'].astype(str)
+ Sim_Winner_Frame = Sim_Winner_Frame.assign(win_count=Sim_Winner_Frame['unique_id'].map(Sim_Winner_Frame['unique_id'].value_counts()))
+
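+ # Downcasting to float16/float32 trades a little precision for memory, which seems aimed at keeping
+ # the Streamlit session light; the values are only displayed to two decimals below anyway.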
+ # Type Casting
+ type_cast_dict = {'Salary': int, 'Projection': np.float16, 'Fantasy': np.float16, 'GPP_Proj': np.float32}
+ Sim_Winner_Frame = Sim_Winner_Frame.astype(type_cast_dict)
+
+ del FinalPortfolio, insert_port, type_cast_dict
+
+ # Keep the top 100 unique lineups, ranked by sim win count and then GPP_Proj
+ st.session_state.Sim_Winner_Frame = Sim_Winner_Frame.sort_values(by=['win_count', 'GPP_Proj'], ascending=[False, False]).copy().drop_duplicates(subset='unique_id').head(100)
+ st.session_state.Sim_Winner_Frame.drop(columns='unique_id', inplace=True)
+
+ # Copy the full winners frame for CSV export (player names are swapped for site IDs below)
+ st.session_state.Sim_Winner_Export = Sim_Winner_Frame.copy()
+
+ # Copy the full winners frame for on-screen display and exposure counts
+ st.session_state.Sim_Winner_Display = Sim_Winner_Frame.copy()
+
+ # Swap player names for site-specific IDs in the export frame
+ columns_to_replace = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST']
+
+ if site_var1 == 'Draftkings':
+     replace_dict = dkid_dict
+ elif site_var1 == 'Fanduel':
+     replace_dict = fdid_dict
+
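+ # Note: if site_var1 ever held a value other than 'Draftkings' or 'Fanduel', replace_dict would be
+ # undefined below; a lookup like {'Draftkings': dkid_dict, 'Fanduel': fdid_dict}.get(site_var1, {})
+ # would be a safe fallback (a sketch only, not wired in here).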
+ for col in columns_to_replace:
+     st.session_state.Sim_Winner_Export[col] = st.session_state.Sim_Winner_Export[col].replace(replace_dict)
+
+ del replace_dict, Sim_Winner_Frame, Sim_Winners
+
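+ # Exposure tables: for each roster-slot group, count how often every player appears across the
+ # simulated winners; Exposure is Freq / 2500 (the number of sims) and Edge is Exposure minus
+ # projected ownership.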
+ st.session_state.player_freq = pd.DataFrame(
+     np.column_stack(np.unique(st.session_state.Sim_Winner_Display.iloc[:, 0:9].values, return_counts=True)),
+     columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+ st.session_state.player_freq['Freq'] = st.session_state.player_freq['Freq'].astype(int)
+ st.session_state.player_freq['Position'] = st.session_state.player_freq['Player'].map(maps_dict['Pos_map'])
+ st.session_state.player_freq['Salary'] = st.session_state.player_freq['Player'].map(maps_dict['Salary_map'])
+ st.session_state.player_freq['Proj Own'] = st.session_state.player_freq['Player'].map(maps_dict['Own_map']) / 100
+ st.session_state.player_freq['Exposure'] = st.session_state.player_freq['Freq'] / 2500
+ st.session_state.player_freq['Edge'] = st.session_state.player_freq['Exposure'] - st.session_state.player_freq['Proj Own']
+ st.session_state.player_freq['Team'] = st.session_state.player_freq['Player'].map(maps_dict['Team_map'])
+ st.session_state.player_freq['Team'] = st.session_state.player_freq['Team'].replace(item_list, team_list)
+
+ st.session_state.qb_freq = pd.DataFrame(
+     np.column_stack(np.unique(st.session_state.Sim_Winner_Display.iloc[:, 0:1].values, return_counts=True)),
+     columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+ st.session_state.qb_freq['Freq'] = st.session_state.qb_freq['Freq'].astype(int)
+ st.session_state.qb_freq['Position'] = st.session_state.qb_freq['Player'].map(maps_dict['Pos_map'])
+ st.session_state.qb_freq['Salary'] = st.session_state.qb_freq['Player'].map(maps_dict['Salary_map'])
+ st.session_state.qb_freq['Proj Own'] = st.session_state.qb_freq['Player'].map(maps_dict['Own_map']) / 100
+ st.session_state.qb_freq['Exposure'] = st.session_state.qb_freq['Freq'] / 2500
+ st.session_state.qb_freq['Edge'] = st.session_state.qb_freq['Exposure'] - st.session_state.qb_freq['Proj Own']
+ st.session_state.qb_freq['Team'] = st.session_state.qb_freq['Player'].map(maps_dict['Team_map'])
+ st.session_state.qb_freq['Team'] = st.session_state.qb_freq['Team'].replace(item_list, team_list)
+
+ st.session_state.rb_freq = pd.DataFrame(
+     np.column_stack(np.unique(st.session_state.Sim_Winner_Display.iloc[:, [1, 2]].values, return_counts=True)),
+     columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+ st.session_state.rb_freq['Freq'] = st.session_state.rb_freq['Freq'].astype(int)
+ st.session_state.rb_freq['Position'] = st.session_state.rb_freq['Player'].map(maps_dict['Pos_map'])
+ st.session_state.rb_freq['Salary'] = st.session_state.rb_freq['Player'].map(maps_dict['Salary_map'])
+ st.session_state.rb_freq['Proj Own'] = st.session_state.rb_freq['Player'].map(maps_dict['Own_map']) / 100
+ st.session_state.rb_freq['Exposure'] = st.session_state.rb_freq['Freq'] / 2500
+ st.session_state.rb_freq['Edge'] = st.session_state.rb_freq['Exposure'] - st.session_state.rb_freq['Proj Own']
+ st.session_state.rb_freq['Team'] = st.session_state.rb_freq['Player'].map(maps_dict['Team_map'])
+ st.session_state.rb_freq['Team'] = st.session_state.rb_freq['Team'].replace(item_list, team_list)
+
+ st.session_state.wr_freq = pd.DataFrame(
+     np.column_stack(np.unique(st.session_state.Sim_Winner_Display.iloc[:, [3, 4, 5]].values, return_counts=True)),
+     columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+ st.session_state.wr_freq['Freq'] = st.session_state.wr_freq['Freq'].astype(int)
+ st.session_state.wr_freq['Position'] = st.session_state.wr_freq['Player'].map(maps_dict['Pos_map'])
+ st.session_state.wr_freq['Salary'] = st.session_state.wr_freq['Player'].map(maps_dict['Salary_map'])
+ st.session_state.wr_freq['Proj Own'] = st.session_state.wr_freq['Player'].map(maps_dict['Own_map']) / 100
+ st.session_state.wr_freq['Exposure'] = st.session_state.wr_freq['Freq'] / 2500
+ st.session_state.wr_freq['Edge'] = st.session_state.wr_freq['Exposure'] - st.session_state.wr_freq['Proj Own']
+ st.session_state.wr_freq['Team'] = st.session_state.wr_freq['Player'].map(maps_dict['Team_map'])
+ st.session_state.wr_freq['Team'] = st.session_state.wr_freq['Team'].replace(item_list, team_list)
+
+ st.session_state.te_freq = pd.DataFrame(
+     np.column_stack(np.unique(st.session_state.Sim_Winner_Display.iloc[:, [6]].values, return_counts=True)),
+     columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+ st.session_state.te_freq['Freq'] = st.session_state.te_freq['Freq'].astype(int)
+ st.session_state.te_freq['Position'] = st.session_state.te_freq['Player'].map(maps_dict['Pos_map'])
+ st.session_state.te_freq['Salary'] = st.session_state.te_freq['Player'].map(maps_dict['Salary_map'])
+ st.session_state.te_freq['Proj Own'] = st.session_state.te_freq['Player'].map(maps_dict['Own_map']) / 100
+ st.session_state.te_freq['Exposure'] = st.session_state.te_freq['Freq'] / 2500
+ st.session_state.te_freq['Edge'] = st.session_state.te_freq['Exposure'] - st.session_state.te_freq['Proj Own']
+ st.session_state.te_freq['Team'] = st.session_state.te_freq['Player'].map(maps_dict['Team_map'])
+ st.session_state.te_freq['Team'] = st.session_state.te_freq['Team'].replace(item_list, team_list)
+
+ st.session_state.flex_freq = pd.DataFrame(
+     np.column_stack(np.unique(st.session_state.Sim_Winner_Display.iloc[:, [7]].values, return_counts=True)),
+     columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+ st.session_state.flex_freq['Freq'] = st.session_state.flex_freq['Freq'].astype(int)
+ st.session_state.flex_freq['Position'] = st.session_state.flex_freq['Player'].map(maps_dict['Pos_map'])
+ st.session_state.flex_freq['Salary'] = st.session_state.flex_freq['Player'].map(maps_dict['Salary_map'])
+ st.session_state.flex_freq['Proj Own'] = st.session_state.flex_freq['Player'].map(maps_dict['Own_map']) / 100
+ st.session_state.flex_freq['Exposure'] = st.session_state.flex_freq['Freq'] / 2500
+ st.session_state.flex_freq['Edge'] = st.session_state.flex_freq['Exposure'] - st.session_state.flex_freq['Proj Own']
+ st.session_state.flex_freq['Team'] = st.session_state.flex_freq['Player'].map(maps_dict['Team_map'])
+ st.session_state.flex_freq['Team'] = st.session_state.flex_freq['Team'].replace(item_list, team_list)
+
+ st.session_state.dst_freq = pd.DataFrame(
+     np.column_stack(np.unique(st.session_state.Sim_Winner_Display.iloc[:, 8:9].values, return_counts=True)),
+     columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+ st.session_state.dst_freq['Freq'] = st.session_state.dst_freq['Freq'].astype(int)
+ st.session_state.dst_freq['Position'] = st.session_state.dst_freq['Player'].map(maps_dict['Pos_map'])
+ st.session_state.dst_freq['Salary'] = st.session_state.dst_freq['Player'].map(maps_dict['Salary_map'])
+ st.session_state.dst_freq['Proj Own'] = st.session_state.dst_freq['Player'].map(maps_dict['Own_map']) / 100
+ st.session_state.dst_freq['Exposure'] = st.session_state.dst_freq['Freq'] / 2500
+ st.session_state.dst_freq['Edge'] = st.session_state.dst_freq['Exposure'] - st.session_state.dst_freq['Proj Own']
+ st.session_state.dst_freq['Team'] = st.session_state.dst_freq['Player'].map(maps_dict['Team_map'])
+ st.session_state.dst_freq['Team'] = st.session_state.dst_freq['Team'].replace(item_list, team_list)
+
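+ # Optional refactor sketch (not part of the original app): the seven blocks above differ only in which
+ # lineup columns they scan, so a hypothetical helper like this could build any of the exposure tables.
+ # The name, signature, and default n_sims=2500 are assumptions for illustration; nothing below calls it.
+ def build_freq_table(lineups, col_idx, maps_dict, item_list, team_list, n_sims=2500):
+     # Count how often each player appears in the selected lineup columns.
+     players, counts = np.unique(lineups.iloc[:, col_idx].values, return_counts=True)
+     freq = pd.DataFrame({'Player': players, 'Freq': counts.astype(int)})
+     freq = freq.sort_values('Freq', ascending=False).reset_index(drop=True)
+     # Attach metadata from the projection maps.
+     freq['Position'] = freq['Player'].map(maps_dict['Pos_map'])
+     freq['Salary'] = freq['Player'].map(maps_dict['Salary_map'])
+     freq['Proj Own'] = freq['Player'].map(maps_dict['Own_map']) / 100
+     freq['Exposure'] = freq['Freq'] / n_sims
+     freq['Edge'] = freq['Exposure'] - freq['Proj Own']
+     # Normalize team abbreviations the same way the blocks above do.
+     freq['Team'] = freq['Player'].map(maps_dict['Team_map']).replace(item_list, team_list)
+     return freq
+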
+ with st.container():
+     if 'player_freq' in st.session_state:
+         player_split_var2 = st.radio("Are you wanting to isolate any lineups with specific players?", ('Full Players', 'Specific Players'), key='player_split_var2')
+         if player_split_var2 == 'Specific Players':
+             find_var2 = st.multiselect('Which players must be included in the lineups?', options=st.session_state.player_freq['Player'].unique())
+         elif player_split_var2 == 'Full Players':
+             find_var2 = st.session_state.player_freq.Player.values.tolist()
+
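+         # Keep only lineups that contain every selected player: np.equal.outer matches each selected
+         # name against every roster slot, .any(axis=1) checks presence per lineup, .all(axis=1) requires
+         # that all selections are present.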
+         if player_split_var2 == 'Specific Players':
+             st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame[np.equal.outer(st.session_state.Sim_Winner_Frame.to_numpy(), find_var2).any(axis=1).all(axis=1)]
+         if player_split_var2 == 'Full Players':
+             st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame
+     if 'Sim_Winner_Display' in st.session_state:
+         st.dataframe(st.session_state.Sim_Winner_Display.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').background_gradient(cmap='RdYlGn_r', subset=['Own']).format(precision=2), use_container_width=True)
+     if 'Sim_Winner_Export' in st.session_state:
+         st.download_button(
+             label="Export Full Frame",
+             data=st.session_state.Sim_Winner_Export.to_csv().encode('utf-8'),
+             file_name='NFL_consim_export.csv',
+             mime='text/csv',
+         )
+
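+ # One tab per roster-slot group, each showing its exposure table with a CSV download.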
+ with st.container():
+     tab1, tab2, tab3, tab4, tab5, tab6, tab7 = st.tabs(['Overall Exposures', 'QB Exposures', 'RB Exposures', 'WR Exposures', 'TE Exposures', 'FLEX Exposures', 'DST Exposures'])
+     with tab1:
+         if 'player_freq' in st.session_state:
+             st.dataframe(st.session_state.player_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width=True)
+             st.download_button(
+                 label="Export Exposures",
+                 data=st.session_state.player_freq.to_csv().encode('utf-8'),
+                 file_name='player_freq_export.csv',
+                 mime='text/csv',
+             )
+     with tab2:
+         if 'qb_freq' in st.session_state:
+             st.dataframe(st.session_state.qb_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width=True)
+             st.download_button(
+                 label="Export Exposures",
+                 data=st.session_state.qb_freq.to_csv().encode('utf-8'),
+                 file_name='qb_freq_export.csv',
+                 mime='text/csv',
+             )
+     with tab3:
+         if 'rb_freq' in st.session_state:
+             st.dataframe(st.session_state.rb_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width=True)
+             st.download_button(
+                 label="Export Exposures",
+                 data=st.session_state.rb_freq.to_csv().encode('utf-8'),
+                 file_name='rb_freq_export.csv',
+                 mime='text/csv',
+             )
+     with tab4:
+         if 'wr_freq' in st.session_state:
+             st.dataframe(st.session_state.wr_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width=True)
+             st.download_button(
+                 label="Export Exposures",
+                 data=st.session_state.wr_freq.to_csv().encode('utf-8'),
+                 file_name='wr_freq_export.csv',
+                 mime='text/csv',
+             )
+     with tab5:
+         if 'te_freq' in st.session_state:
+             st.dataframe(st.session_state.te_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width=True)
+             st.download_button(
+                 label="Export Exposures",
+                 data=st.session_state.te_freq.to_csv().encode('utf-8'),
+                 file_name='te_freq_export.csv',
+                 mime='text/csv',
+             )
+     with tab6:
+         if 'flex_freq' in st.session_state:
+             st.dataframe(st.session_state.flex_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width=True)
+             st.download_button(
+                 label="Export Exposures",
+                 data=st.session_state.flex_freq.to_csv().encode('utf-8'),
+                 file_name='flex_freq_export.csv',
+                 mime='text/csv',
+             )
+     with tab7:
+         if 'dst_freq' in st.session_state:
+             st.dataframe(st.session_state.dst_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width=True)
+             st.download_button(
+                 label="Export Exposures",
+                 data=st.session_state.dst_freq.to_csv().encode('utf-8'),
+                 file_name='dst_freq_export.csv',
+                 mime='text/csv',
+             )
+
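+ # Manual cleanup: drop the large intermediates and force a garbage-collection pass, presumably to keep
+ # the Streamlit session's memory footprint down between reruns.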
+ del gcservice_account
+ del dk_roo_raw, fd_roo_raw
+ del t_stamp
+ del dkid_dict, fdid_dict
+ del static_exposure, overall_exposure
+ del insert_port1, Contest_Size, sharp_split, Strength_var, scaling_var, Sort_function, Sim_function, strength_grow, field_growth
+ del raw_baselines
+ del freq_format
+
+ gc.collect()