Multichem committed on
Commit 4b7b7d2 · 1 Parent(s): b4215db

Create app.py

Files changed (1)
  1. app.py +1006 -0
app.py ADDED
@@ -0,0 +1,1006 @@
1
+ import streamlit as st
2
+ st.set_page_config(layout="wide")
3
+
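+ # Streamlit re-runs this script on every interaction; clear every global not starting
+ # with '_' before the real imports below.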
4
+ for name in dir():
5
+ if not name.startswith('_'):
6
+ del globals()[name]
7
+
8
+ import numpy as np
9
+ import pandas as pd
10
+ import streamlit as st
11
+ import gspread
12
+ import random
13
+ import gc
14
+
15
+ @st.cache_resource
16
+ def init_conn():
17
+ scope = ['https://www.googleapis.com/auth/spreadsheets',
18
+ "https://www.googleapis.com/auth/drive"]
19
+
20
+ credentials = {
21
+ "type": "service_account",
22
+ "project_id": "sheets-api-connect-378620",
23
+ "private_key_id": "1005124050c80d085e2c5b344345715978dd9cc9",
24
+ "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCtKa01beXwc88R\nnPZVQTNPVQuBnbwoOfc66gW3547ja/UEyIGAF112dt/VqHprRafkKGmlg55jqJNt\na4zceLKV+wTm7vBu7lDISTJfGzCf2TrxQYNqwMKE2LOjI69dBM8u4Dcb4k0wcp9v\ntW1ZzLVVuwTvmrg7JBHjiSaB+x5wxm/r3FOiJDXdlAgFlytzqgcyeZMJVKKBQHyJ\njEGg/1720A0numuOCt71w/2G0bDmijuj1e6tH32MwRWcvRNZ19K9ssyDz2S9p68s\nYDhIxX69OWxwScTIHLY6J2t8txf/XMivL/636fPlDADvBEVTdlT606n8CcKUVQeq\npUVdG+lfAgMBAAECggEAP38SUA7B69eTfRpo658ycOs3Amr0JW4H/bb1rNeAul0K\nZhwd/HnU4E07y81xQmey5kN5ZeNrD5EvqkZvSyMJHV0EEahZStwhjCfnDB/cxyix\nZ+kFhv4y9eK+kFpUAhBy5nX6T0O+2T6WvzAwbmbVsZ+X8kJyPuF9m8ldcPlD0sce\ntj8NwVq1ys52eosqs7zi2vjt+eMcaY393l4ls+vNq8Yf27cfyFw45W45CH/97/Nu\n5AmuzlCOAfFF+z4OC5g4rei4E/Qgpxa7/uom+BVfv9G0DIGW/tU6Sne0+37uoGKt\nW6DzhgtebUtoYkG7ZJ05BTXGp2lwgVcNRoPwnKJDxQKBgQDT5wYPUBDW+FHbvZSp\nd1m1UQuXyerqOTA9smFaM8sr/UraeH85DJPEIEk8qsntMBVMhvD3Pw8uIUeFNMYj\naLmZFObsL+WctepXrVo5NB6RtLB/jZYxiKMatMLUJIYtcKIp+2z/YtKiWcLnwotB\nWdCjVnPTxpkurmF2fWP/eewZ+wKBgQDRMtJg7etjvKyjYNQ5fARnCc+XsI3gkBe1\nX9oeXfhyfZFeBXWnZzN1ITgFHplDznmBdxAyYGiQdbbkdKQSghviUQ0igBvoDMYy\n1rWcy+a17Mj98uyNEfmb3X2cC6WpvOZaGHwg9+GY67BThwI3FqHIbyk6Ko09WlTX\nQpRQjMzU7QKBgAfi1iflu+q0LR+3a3vvFCiaToskmZiD7latd9AKk2ocsBd3Woy9\n+hXXecJHPOKV4oUJlJgvAZqe5HGBqEoTEK0wyPNLSQlO/9ypd+0fEnArwFHO7CMF\nycQprAKHJXM1eOOFFuZeQCaInqdPZy1UcV5Szla4UmUZWkk1m24blHzXAoGBAMcA\nyH4qdbxX9AYrC1dvsSRvgcnzytMvX05LU0uF6tzGtG0zVlub4ahvpEHCfNuy44UT\nxRWW/oFFaWjjyFxO5sWggpUqNuHEnRopg3QXx22SRRTGbN45li/+QAocTkgsiRh1\nqEcYZsO4mPCsQqAy6E2p6RcK+Xa+omxvSnVhq0x1AoGAKr8GdkCl4CF6rieLMAQ7\nLNBuuoYGaHoh8l5E2uOQpzwxVy/nMBcAv+2+KqHEzHryUv1owOi6pMLv7A9mTFoS\n18B0QRLuz5fSOsVnmldfC9fpUc6H8cH1SINZpzajqQA74bPwELJjnzrCnH79TnHG\nJuElxA33rFEjbgbzdyrE768=\n-----END PRIVATE KEY-----\n",
25
+ "client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
26
+ "client_id": "106625872877651920064",
27
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
28
+ "token_uri": "https://oauth2.googleapis.com/token",
29
+ "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
30
+ "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
31
+ }
32
+
33
+ gc_con = gspread.service_account_from_dict(credentials)
34
+
35
+ return gc_con
36
+
37
+ gcservice_account = init_conn()
38
+
39
+ freq_format = {'Proj Own': '{:.2%}', 'Exposure': '{:.2%}', 'Edge': '{:.2%}'}
40
+
41
+ @st.cache_resource(ttl = 300)
42
+ def load_dk_player_projections():
43
+ sh = gcservice_account.open_by_url('https://docs.google.com/spreadsheets/d/1Yq0vGriWK-bS79e-bD6_u9pqrYE6Yrlbb_wEkmH-ot0/edit#gid=172632260')
44
+ worksheet = sh.worksheet('DK_Build_Up')
45
+ load_display = pd.DataFrame(worksheet.get_all_records())
46
+ load_display.replace('', np.nan, inplace=True)
47
+ raw_display = load_display.dropna(subset=['Median'])
48
+
49
+ return raw_display
50
+
51
+ @st.cache_resource(ttl = 300)
52
+ def load_fd_player_projections():
53
+ sh = gcservice_account.open_by_url('https://docs.google.com/spreadsheets/d/1Yq0vGriWK-bS79e-bD6_u9pqrYE6Yrlbb_wEkmH-ot0/edit#gid=172632260')
54
+ worksheet = sh.worksheet('FD_Build_Up')
55
+ load_display = pd.DataFrame(worksheet.get_all_records())
56
+ load_display.replace('', np.nan, inplace=True)
57
+ raw_display = load_display.dropna(subset=['Median'])
58
+
59
+ return raw_display
60
+
61
+ @st.cache_resource(ttl = 300)
62
+ def set_export_ids():
63
+ sh = gcservice_account.open_by_url('https://docs.google.com/spreadsheets/d/1Yq0vGriWK-bS79e-bD6_u9pqrYE6Yrlbb_wEkmH-ot0/edit#gid=172632260')
64
+ worksheet = sh.worksheet('DK_Salaries')
65
+ load_display = pd.DataFrame(worksheet.get_all_records())
66
+ load_display.replace('', np.nan, inplace=True)
67
+ raw_display = load_display.dropna(subset=['Median'])
68
+ dk_ids = dict(zip(raw_display['Player'], raw_display['player_id']))
69
+
70
+ worksheet = sh.worksheet('FD_Salaries')
71
+ load_display = pd.DataFrame(worksheet.get_all_records())
72
+ load_display.replace('', np.nan, inplace=True)
73
+ raw_display = load_display.dropna(subset=['Median'])
74
+ fd_ids = dict(zip(raw_display['Player'], raw_display['player_id']))
75
+
76
+ return dk_ids, fd_ids
77
+
78
+ dk_roo_raw = load_dk_player_projections()
79
+ fd_roo_raw = load_fd_player_projections()
80
+ t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
81
+ dkid_dict, fdid_dict = set_export_ids()
82
+
83
+ static_exposure = pd.DataFrame(columns=['Player', 'count'])
84
+ overall_exposure = pd.DataFrame(columns=['Player', 'count'])
85
+
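+ # Monte Carlo contest simulation: each iteration draws a random field of Contest_Size
+ # lineups (minus the uploaded lineups when insert_port == 1), adds a normally
+ # distributed fantasy score per player, and records that iteration's top lineup.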
86
+ def sim_contest(Sim_size, FinalPortfolio, CleanPortfolio, maps_dict, up_dict, insert_port):
87
+ SimVar = 1
88
+ Sim_Winners = []
89
+ fp_array = FinalPortfolio.values
90
+
91
+ if insert_port == 1:
92
+ up_array = CleanPortfolio.values
93
+
94
+ # Pre-vectorize functions
95
+ vec_projection_map = np.vectorize(maps_dict['Projection_map'].__getitem__)
96
+ vec_stdev_map = np.vectorize(maps_dict['STDev_map'].__getitem__)
97
+
98
+ if insert_port == 1:
99
+ vec_up_projection_map = np.vectorize(up_dict['Projection_map'].__getitem__)
100
+ vec_up_stdev_map = np.vectorize(up_dict['STDev_map'].__getitem__)
101
+
102
+ st.write('Simulating contest on frames')
103
+
104
+ while SimVar <= Sim_size:
105
+ if insert_port == 1:
106
+ fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size-len(CleanPortfolio))]
107
+ elif insert_port == 0:
108
+ fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size)]
109
+
110
+ sample_arrays1 = np.c_[
111
+ fp_random,
112
+ np.sum(np.random.normal(
113
+ loc=vec_projection_map(fp_random[:, :8]),  # columns 0-7 are the eight roster slots
114
+ scale=vec_stdev_map(fp_random[:, :8])),
115
+ axis=1)
116
+ ]
117
+
118
+ if insert_port == 1:
119
+ sample_arrays2 = np.c_[
120
+ up_array,
121
+ np.sum(np.random.normal(
122
+ loc=vec_up_projection_map(up_array[:, :8]),  # columns 0-7 are the eight roster slots
123
+ scale=vec_up_stdev_map(up_array[:, :8])),
124
+ axis=1)
125
+ ]
126
+ sample_arrays = np.vstack((sample_arrays1, sample_arrays2))
127
+ else:
128
+ sample_arrays = sample_arrays1
129
+
130
+ final_array = sample_arrays[sample_arrays[:, 9].argsort()[::-1]]
131
+ best_lineup = final_array[final_array[:, -1].argsort(kind='stable')[::-1][:1]]
132
+ Sim_Winners.append(best_lineup)
133
+ SimVar += 1
134
+
135
+ return Sim_Winners
136
+
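+ # Builds the seed field in passes: early passes use the base field strength, later
+ # passes raise it, and each pass concatenates correlated and uncorrelated portfolios
+ # before dropping duplicates on ('Projection', 'Own').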
137
+ def run_seed_frame(seed_depth1, Strength_var, strength_grow, Teams_used, Total_Runs, field_growth):
138
+ RunsVar = 1
139
+ seed_depth_def = seed_depth1
140
+ Strength_var_def = Strength_var
141
+ strength_grow_def = strength_grow
142
+ Teams_used_def = Teams_used
143
+ Total_Runs_def = Total_Runs
144
+
145
+ st.write('Creating Seed Frames')
146
+
147
+ while RunsVar <= seed_depth_def:
148
+ if RunsVar <= 3:
149
+ FieldStrength = Strength_var_def
150
+ FinalPortfolio, maps_dict = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
151
+ FinalPortfolio2, maps_dict2 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
152
+ FinalPortfolio_init = pd.concat([FinalPortfolio, FinalPortfolio2], axis=0)
153
+ maps_dict.update(maps_dict2)
154
+ elif RunsVar > 3 and RunsVar <= 4:
155
+ FieldStrength += (strength_grow_def + ((30 - len(Teams_used_def)) * .001))
156
+ FinalPortfolio3, maps_dict3 = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
157
+ FinalPortfolio4, maps_dict4 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
158
+ FinalPortfolio_merge_3 = pd.concat([FinalPortfolio_init, FinalPortfolio3], axis=0)
159
+ FinalPortfolio_merge_4 = pd.concat([FinalPortfolio_merge_3, FinalPortfolio4], axis=0)
160
+ FinalPortfolio_step_2 = FinalPortfolio_merge_4.drop_duplicates(subset = ['Projection', 'Own'],keep = 'last').reset_index(drop = True)
161
+ maps_dict.update(maps_dict3)
162
+ maps_dict.update(maps_dict4)
163
+ elif RunsVar > 4:
164
+ FieldStrength = 1
165
+ FinalPortfolio5, maps_dict5 = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
166
+ FinalPortfolio6, maps_dict6 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
167
+ FinalPortfolio_merge_5 = pd.concat([FinalPortfolio_step_2, FinalPortfolio5], axis=0)
168
+ FinalPortfolio_merge_6 = pd.concat([FinalPortfolio_merge_5, FinalPortfolio6], axis=0)
169
+ FinalPortfolio_export = FinalPortfolio_merge_6.drop_duplicates(subset = ['Projection', 'Own'],keep = 'last').reset_index(drop = True)
170
+ maps_dict.update(maps_dict5)
171
+ maps_dict.update(maps_dict6)
172
+ RunsVar += 1
173
+
174
+ return FinalPortfolio_export, maps_dict
175
+
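+ # Returns the player pool for one roster slot plus an {index: player} lookup used for
+ # random draws; UTIL uses the full pool sorted by Value.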
176
+ def create_overall_dfs(pos_players, table_name, dict_name, pos):
177
+ if pos == "UTIL":
178
+ pos_players = pos_players.sort_values(by='Value', ascending=False)
179
+ table_name_raw = pos_players.reset_index(drop=True)
180
+ overall_table_name = table_name_raw.head(round(len(table_name_raw)))
181
+ overall_table_name = overall_table_name.assign(Var = range(0,len(overall_table_name)))
182
+ overall_dict_name = pd.Series(overall_table_name.Player.values, index=overall_table_name.Var).to_dict()
183
+ elif pos != "UTIL":
184
+ table_name_raw = pos_players[pos_players['Position'].str.contains(pos)].reset_index(drop=True)
185
+ overall_table_name = table_name_raw.head(round(len(table_name_raw)))
186
+ overall_table_name = overall_table_name.assign(Var = range(0,len(overall_table_name)))
187
+ overall_dict_name = pd.Series(overall_table_name.Player.values, index=overall_table_name.Var).to_dict()
188
+
189
+ return overall_table_name, overall_dict_name
190
+
191
+
192
+ def get_overall_merged_df():
193
+ ref_dict = {
194
+ 'pos':['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL'],
195
+ 'pos_dfs':['PG_Table', 'SG_Table', 'SF_Table', 'PF_Table', 'C_Table', 'G_Table', 'F_Table', 'UTIL_Table'],
196
+ 'pos_dicts':['pg_dict', 'sg_dict', 'sf_dict', 'pf_dict', 'c_dict', 'g_dict', 'f_dict', 'util_dict']
197
+ }
198
+
199
+ for i in range(0,8):
200
+ ref_dict['pos_dfs'][i], ref_dict['pos_dicts'][i] =\
201
+ create_overall_dfs(pos_players, ref_dict['pos_dfs'][i], ref_dict['pos_dicts'][i], ref_dict['pos'][i])
202
+
203
+ df_out = pd.concat(ref_dict['pos_dfs'], ignore_index=True)
204
+
205
+ return ref_dict
206
+
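+ # Candidate-pool size for a slot: scaled by field strength, floored at min_val,
+ # grown by field_growth, and capped at the number of available players.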
207
+ def calculate_range_var(count, min_val, FieldStrength, field_growth):
208
+ var = round(len(count[0]) * FieldStrength)
209
+ var = max(var, min_val)
210
+ var += round(field_growth)
211
+
212
+ return min(var, len(count[0]))
213
+
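+ # Draws random candidate indices for every roster slot; the portfolio builders below
+ # map those indices back to player names via full_pos_player_dict.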
214
+ def create_random_portfolio(Total_Sample_Size, raw_baselines, field_growth):
215
+
216
+ full_pos_player_dict = get_overall_merged_df()
217
+
218
+ field_growth_rounded = round(field_growth)
219
+ ranges_dict = {}
220
+
221
+ # Calculate ranges
222
+ for df, dict_val, min_val, key in zip(ref_dict['pos_dfs'], ref_dict['pos_dicts'],
223
+ [20, 15, 15, 20, 20, 30, 30, 50], ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']):
224
+ count = create_overall_dfs(pos_players, df, dict_val, key)
225
+ ranges_dict[f"{key.lower()}_range"] = calculate_range_var(count, min_val, FieldStrength, field_growth_rounded)
226
+
227
+ # Generate random portfolios
228
+ rng = np.random.default_rng()
229
+ total_elements = [1, 1, 1, 1, 1, 1, 1, 1]
230
+ keys = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']
231
+
232
+ all_choices = [rng.choice(ranges_dict[f"{key.lower()}_range"], size=(Total_Sample_Size, elem)) for key, elem in zip(keys, total_elements)]  # ranges_dict keys are stored lowercase above
233
+ RandomPortfolio = pd.DataFrame(np.hstack(all_choices), columns=['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL'])
234
+ RandomPortfolio['User/Field'] = 0
235
+
236
+ return RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict
237
+
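+ # Converts sampled indices to player names, drops lineups with repeated players,
+ # attaches salary/projection/ownership totals, and keeps lineups inside the site's
+ # salary band before sorting by the simulation metric.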
238
+ def get_correlated_portfolio_for_sim(Total_Sample_Size, sharp_split, field_growth):
239
+
240
+ sizesplit = round(Total_Sample_Size * sharp_split)
241
+
242
+ RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines, field_growth)
243
+
244
+ RandomPortfolio['PG'] = pd.Series(list(RandomPortfolio['PG'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
245
+ RandomPortfolio['SG'] = pd.Series(list(RandomPortfolio['SG'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
246
+ RandomPortfolio['SF'] = pd.Series(list(RandomPortfolio['SF'].map(full_pos_player_dict['pos_dicts'][2])), dtype="string[pyarrow]")
247
+ RandomPortfolio['PF'] = pd.Series(list(RandomPortfolio['PF'].map(full_pos_player_dict['pos_dicts'][3])), dtype="string[pyarrow]")
248
+ RandomPortfolio['C'] = pd.Series(list(RandomPortfolio['C'].map(full_pos_player_dict['pos_dicts'][4])), dtype="string[pyarrow]")
249
+ RandomPortfolio['G'] = pd.Series(list(RandomPortfolio['G'].map(full_pos_player_dict['pos_dicts'][5])), dtype="string[pyarrow]")
250
+ RandomPortfolio['F'] = pd.Series(list(RandomPortfolio['F'].map(full_pos_player_dict['pos_dicts'][6])), dtype="string[pyarrow]")
251
+ RandomPortfolio['UTIL'] = pd.Series(list(RandomPortfolio['UTIL'].map(full_pos_player_dict['pos_dicts'][7])), dtype="string[pyarrow]")
252
+ RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
253
+ RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
254
+ RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 9].drop(columns=['plyr_list','plyr_count']).\
255
+ reset_index(drop=True)
256
+
257
+ RandomPortfolio['PGs'] = RandomPortfolio['PG'].map(maps_dict['Salary_map']).astype(np.int32)
258
+ RandomPortfolio['SGs'] = RandomPortfolio['SG'].map(maps_dict['Salary_map']).astype(np.int32)
259
+ RandomPortfolio['SFs'] = RandomPortfolio['SF'].map(maps_dict['Salary_map']).astype(np.int32)
260
+ RandomPortfolio['PFs'] = RandomPortfolio['PF'].map(maps_dict['Salary_map']).astype(np.int32)
261
+ RandomPortfolio['Cs'] = RandomPortfolio['C'].map(maps_dict['Salary_map']).astype(np.int32)
262
+ RandomPortfolio['Gs'] = RandomPortfolio['G'].map(maps_dict['Salary_map']).astype(np.int32)
263
+ RandomPortfolio['Fs'] = RandomPortfolio['F'].map(maps_dict['Salary_map']).astype(np.int32)
264
+ RandomPortfolio['UTILs'] = RandomPortfolio['UTIL'].map(maps_dict['Salary_map']).astype(np.int32)
265
+
266
+ RandomPortfolio['PGp'] = RandomPortfolio['PG'].map(maps_dict['Projection_map']).astype(np.float16)
267
+ RandomPortfolio['SGp'] = RandomPortfolio['SG'].map(maps_dict['Projection_map']).astype(np.float16)
268
+ RandomPortfolio['SFp'] = RandomPortfolio['SF'].map(maps_dict['Projection_map']).astype(np.float16)
269
+ RandomPortfolio['PFp'] = RandomPortfolio['PF'].map(maps_dict['Projection_map']).astype(np.float16)
270
+ RandomPortfolio['Cp'] = RandomPortfolio['C'].map(maps_dict['Projection_map']).astype(np.float16)
271
+ RandomPortfolio['Gp'] = RandomPortfolio['G'].map(maps_dict['Projection_map']).astype(np.float16)
272
+ RandomPortfolio['Fp'] = RandomPortfolio['F'].map(maps_dict['Projection_map']).astype(np.float16)
273
+ RandomPortfolio['UTILp'] = RandomPortfolio['UTIL'].map(maps_dict['Projection_map']).astype(np.float16)
275
+
276
+ RandomPortfolio['PGo'] = RandomPortfolio['PG'].map(maps_dict['Own_map']).astype(np.float16)
277
+ RandomPortfolio['SGo'] = RandomPortfolio['SG'].map(maps_dict['Own_map']).astype(np.float16)
278
+ RandomPortfolio['SFo'] = RandomPortfolio['SF'].map(maps_dict['Own_map']).astype(np.float16)
279
+ RandomPortfolio['PFo'] = RandomPortfolio['PF'].map(maps_dict['Own_map']).astype(np.float16)
280
+ RandomPortfolio['Co'] = RandomPortfolio['C'].map(maps_dict['Own_map']).astype(np.float16)
281
+ RandomPortfolio['Go'] = RandomPortfolio['G'].map(maps_dict['Own_map']).astype(np.float16)
282
+ RandomPortfolio['Fo'] = RandomPortfolio['F'].map(maps_dict['Own_map']).astype(np.float16)
283
+ RandomPortfolio['UTILo'] = RandomPortfolio['UTIL'].map(maps_dict['Own_map']).astype(np.float16)
285
+
286
+ RandomPortArray = RandomPortfolio.to_numpy()
287
+
288
+ RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,9:17].astype(int))]
289
+ RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,17:25].astype(np.double))]
290
+ RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,25:33].astype(np.double))]
291
+
292
+ RandomPortArrayOut = np.delete(RandomPortArray, np.s_[9:33], axis=1)
293
+ RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut, columns = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL', 'User/Field', 'Salary', 'Projection', 'Own'])
294
+ RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
295
+
296
+ if insert_port == 1:
297
+ CleanPortfolio['Salary'] = sum([CleanPortfolio['PG'].map(maps_dict['Salary_map']),
298
+ CleanPortfolio['SG'].map(maps_dict['Salary_map']),
299
+ CleanPortfolio['SF'].map(maps_dict['Salary_map']),
300
+ CleanPortfolio['PF'].map(maps_dict['Salary_map']),
301
+ CleanPortfolio['C'].map(maps_dict['Salary_map']),
302
+ CleanPortfolio['G'].map(maps_dict['Salary_map']),
303
+ CleanPortfolio['F'].map(maps_dict['Salary_map']),
304
+ CleanPortfolio['UTIL'].map(maps_dict['Salary_map'])
305
+ ]).astype(np.int16)
306
+ if insert_port == 1:
307
+ CleanPortfolio['Projection'] = sum([CleanPortfolio['PG'].map(maps_dict['Projection_map']),
308
+ CleanPortfolio['SG'].map(maps_dict['Projection_map']),
309
+ CleanPortfolio['SF'].map(maps_dict['Projection_map']),
310
+ CleanPortfolio['PF'].map(maps_dict['Projection_map']),
311
+ CleanPortfolio['C'].map(maps_dict['Projection_map']),
312
+ CleanPortfolio['G'].map(maps_dict['Projection_map']),
313
+ CleanPortfolio['F'].map(maps_dict['Projection_map']),
314
+ CleanPortfolio['UTIL'].map(maps_dict['Projection_map'])
315
+ ]).astype(np.float16)
316
+ if insert_port == 1:
317
+ CleanPortfolio['Own'] = sum([CleanPortfolio['PG'].map(maps_dict['Own_map']),
318
+ CleanPortfolio['SG'].map(maps_dict['Own_map']),
319
+ CleanPortfolio['SF'].map(maps_dict['Own_map']),
320
+ CleanPortfolio['PF'].map(maps_dict['Own_map']),
321
+ CleanPortfolio['C'].map(maps_dict['Own_map']),
322
+ CleanPortfolio['G'].map(maps_dict['Own_map']),
323
+ CleanPortfolio['F'].map(maps_dict['Own_map']),
324
+ CleanPortfolio['UTIL'].map(maps_dict['Own_map'])
325
+ ]).astype(np.float16)
326
+
327
+ if site_var1 == 'Draftkings':
328
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
329
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (49500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
330
+ elif site_var1 == 'Fanduel':
331
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
332
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (59500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
333
+
334
+ RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
335
+
336
+ RandomPortfolio = RandomPortfolio[['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL', 'User/Field', 'Salary', 'Projection', 'Own']]
337
+
338
+ return RandomPortfolio, maps_dict
339
+
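+ # Same build steps as get_correlated_portfolio_for_sim, applied to an independent
+ # random draw of lineups.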
340
+ def get_uncorrelated_portfolio_for_sim(Total_Sample_Size, sharp_split, field_growth):
341
+
342
+ sizesplit = round(Total_Sample_Size * sharp_split)
343
+
344
+ RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines, field_growth)
345
+
346
+ RandomPortfolio['PG'] = pd.Series(list(RandomPortfolio['PG'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
347
+ RandomPortfolio['SG'] = pd.Series(list(RandomPortfolio['SG'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
348
+ RandomPortfolio['SF'] = pd.Series(list(RandomPortfolio['SF'].map(full_pos_player_dict['pos_dicts'][2])), dtype="string[pyarrow]")
349
+ RandomPortfolio['PF'] = pd.Series(list(RandomPortfolio['PF'].map(full_pos_player_dict['pos_dicts'][3])), dtype="string[pyarrow]")
350
+ RandomPortfolio['C'] = pd.Series(list(RandomPortfolio['C'].map(full_pos_player_dict['pos_dicts'][4])), dtype="string[pyarrow]")
351
+ RandomPortfolio['G'] = pd.Series(list(RandomPortfolio['G'].map(full_pos_player_dict['pos_dicts'][5])), dtype="string[pyarrow]")
352
+ RandomPortfolio['F'] = pd.Series(list(RandomPortfolio['F'].map(full_pos_player_dict['pos_dicts'][6])), dtype="string[pyarrow]")
353
+ RandomPortfolio['UTIL'] = pd.Series(list(RandomPortfolio['UTIL'].map(full_pos_player_dict['pos_dicts'][7])), dtype="string[pyarrow]")
354
+ RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
355
+ RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
356
+ RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 9].drop(columns=['plyr_list','plyr_count']).\
357
+ reset_index(drop=True)
358
+
359
+ RandomPortfolio['PGs'] = RandomPortfolio['PG'].map(maps_dict['Salary_map']).astype(np.int32)
360
+ RandomPortfolio['SGs'] = RandomPortfolio['SG'].map(maps_dict['Salary_map']).astype(np.int32)
361
+ RandomPortfolio['SFs'] = RandomPortfolio['SF'].map(maps_dict['Salary_map']).astype(np.int32)
362
+ RandomPortfolio['PFs'] = RandomPortfolio['PF'].map(maps_dict['Salary_map']).astype(np.int32)
363
+ RandomPortfolio['Cs'] = RandomPortfolio['C'].map(maps_dict['Salary_map']).astype(np.int32)
364
+ RandomPortfolio['Gs'] = RandomPortfolio['G'].map(maps_dict['Salary_map']).astype(np.int32)
365
+ RandomPortfolio['Fs'] = RandomPortfolio['F'].map(maps_dict['Salary_map']).astype(np.int32)
366
+ RandomPortfolio['UTILs'] = RandomPortfolio['UTIL'].map(maps_dict['Salary_map']).astype(np.int32)
367
+
368
+ RandomPortfolio['PGp'] = RandomPortfolio['PG'].map(maps_dict['Projection_map']).astype(np.float16)
369
+ RandomPortfolio['SGp'] = RandomPortfolio['SG'].map(maps_dict['Projection_map']).astype(np.float16)
370
+ RandomPortfolio['SFp'] = RandomPortfolio['SF'].map(maps_dict['Projection_map']).astype(np.float16)
371
+ RandomPortfolio['PFp'] = RandomPortfolio['PF'].map(maps_dict['Projection_map']).astype(np.float16)
372
+ RandomPortfolio['Cp'] = RandomPortfolio['C'].map(maps_dict['Projection_map']).astype(np.float16)
373
+ RandomPortfolio['Gp'] = RandomPortfolio['G'].map(maps_dict['Projection_map']).astype(np.float16)
374
+ RandomPortfolio['Fp'] = RandomPortfolio['F'].map(maps_dict['Projection_map']).astype(np.float16)
375
+ RandomPortfolio['UTILp'] = RandomPortfolio['UTIL'].map(maps_dict['Projection_map']).astype(np.float16)
377
+
378
+ RandomPortfolio['PGo'] = RandomPortfolio['PG'].map(maps_dict['Own_map']).astype(np.float16)
379
+ RandomPortfolio['SGo'] = RandomPortfolio['SG'].map(maps_dict['Own_map']).astype(np.float16)
380
+ RandomPortfolio['SFo'] = RandomPortfolio['SF'].map(maps_dict['Own_map']).astype(np.float16)
381
+ RandomPortfolio['PFo'] = RandomPortfolio['PF'].map(maps_dict['Own_map']).astype(np.float16)
382
+ RandomPortfolio['Co'] = RandomPortfolio['C'].map(maps_dict['Own_map']).astype(np.float16)
383
+ RandomPortfolio['Go'] = RandomPortfolio['G'].map(maps_dict['Own_map']).astype(np.float16)
384
+ RandomPortfolio['Fo'] = RandomPortfolio['F'].map(maps_dict['Own_map']).astype(np.float16)
385
+ RandomPortfolio['UTILo'] = RandomPortfolio['UTIL'].map(maps_dict['Own_map']).astype(np.float16)
387
+
388
+ RandomPortArray = RandomPortfolio.to_numpy()
389
+
390
+ RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,9:17].astype(int))]
391
+ RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,17:25].astype(np.double))]
392
+ RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,25:33].astype(np.double))]
393
+
394
+ RandomPortArrayOut = np.delete(RandomPortArray, np.s_[9:33], axis=1)
395
+ RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut, columns = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL', 'User/Field', 'Salary', 'Projection', 'Own'])
396
+ RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
397
+
398
+ if insert_port == 1:
399
+ CleanPortfolio['Salary'] = sum([CleanPortfolio['PG'].map(maps_dict['Salary_map']),
400
+ CleanPortfolio['SG'].map(maps_dict['Salary_map']),
401
+ CleanPortfolio['SF'].map(maps_dict['Salary_map']),
402
+ CleanPortfolio['PF'].map(maps_dict['Salary_map']),
403
+ CleanPortfolio['C'].map(maps_dict['Salary_map']),
404
+ CleanPortfolio['G'].map(maps_dict['Salary_map']),
405
+ CleanPortfolio['F'].map(maps_dict['Salary_map']),
406
+ CleanPortfolio['UTIL'].map(maps_dict['Salary_map'])
407
+ ]).astype(np.int16)
408
+ if insert_port == 1:
409
+ CleanPortfolio['Projection'] = sum([CleanPortfolio['PG'].map(maps_dict['Projection_map']),
410
+ CleanPortfolio['SG'].map(maps_dict['Projection_map']),
411
+ CleanPortfolio['SF'].map(maps_dict['Projection_map']),
412
+ CleanPortfolio['PF'].map(maps_dict['Projection_map']),
413
+ CleanPortfolio['C'].map(maps_dict['Projection_map']),
414
+ CleanPortfolio['G'].map(maps_dict['Projection_map']),
415
+ CleanPortfolio['F'].map(maps_dict['Projection_map']),
416
+ CleanPortfolio['UTIL'].map(maps_dict['Projection_map'])
417
+ ]).astype(np.float16)
418
+ if insert_port == 1:
419
+ CleanPortfolio['Own'] = sum([CleanPortfolio['PG'].map(maps_dict['Own_map']),
420
+ CleanPortfolio['SG'].map(maps_dict['Own_map']),
421
+ CleanPortfolio['SF'].map(maps_dict['Own_map']),
422
+ CleanPortfolio['PF'].map(maps_dict['Own_map']),
423
+ CleanPortfolio['C'].map(maps_dict['Own_map']),
424
+ CleanPortfolio['G'].map(maps_dict['Own_map']),
425
+ CleanPortfolio['F'].map(maps_dict['Own_map']),
426
+ CleanPortfolio['UTIL'].map(maps_dict['Own_map'])
427
+ ]).astype(np.float16)
428
+
429
+ if site_var1 == 'Draftkings':
430
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
431
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (49500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
432
+ elif site_var1 == 'Fanduel':
433
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
434
+ RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (59500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
435
+
436
+ RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
437
+
438
+ RandomPortfolio = RandomPortfolio[['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL', 'User/Field', 'Salary', 'Projection', 'Own']]
439
+
440
+ return RandomPortfolio, maps_dict
441
+
442
+ tab1, tab2 = st.tabs(['Uploads', 'Contest Sim'])
443
+
444
+ with tab1:
445
+ with st.container():
446
+ col1, col2 = st.columns([3, 3])
447
+
448
+ with col1:
449
+ st.info("The Projections file can have any columns in any order, but must contain columns explicitly named: 'Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', and 'Own'. Upload your projections first to avoid an error message.")
450
+ proj_file = st.file_uploader("Upload Projections File", key = 'proj_uploader')
451
+
452
+ if proj_file is not None:
453
+ try:
454
+ proj_dataframe = pd.read_csv(proj_file)
455
+ proj_dataframe = proj_dataframe.dropna(subset='Median')
456
+ proj_dataframe['Player'] = proj_dataframe['Player'].str.strip()
457
+ try:
458
+ proj_dataframe['Own'] = proj_dataframe['Own'].str.strip('%').astype(float)
459
+ except:
460
+ pass
461
+
462
+ except:
463
+ proj_dataframe = pd.read_excel(proj_file)
464
+ proj_dataframe = proj_dataframe.dropna(subset='Median')
465
+ proj_dataframe['Player'] = proj_dataframe['Player'].str.strip()
466
+ try:
467
+ proj_dataframe['Own'] = proj_dataframe['Own'].str.strip('%').astype(float)
468
+ except:
469
+ pass
470
+ st.table(proj_dataframe.head(10))
471
+ player_salary_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Salary))
472
+ player_proj_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Median))
473
+ player_own_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Own))
474
+
475
+ with col2:
476
+ st.info("The Portfolio file must contain only the columns 'PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', and 'UTIL', in that order and explicitly named. Upload your projections first to avoid an error message.")
477
+ portfolio_file = st.file_uploader("Upload Portfolio File", key = 'portfolio_uploader')
478
+
479
+ if portfolio_file is not None:
480
+ try:
481
+ portfolio_dataframe = pd.read_csv(portfolio_file)
482
+
483
+ except:
484
+ portfolio_dataframe = pd.read_excel(portfolio_file)
485
+
486
+ try:
487
+ try:
488
+ portfolio_dataframe.columns=['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']
489
+ split_portfolio = portfolio_dataframe
490
+ split_portfolio[['PG', 'PG_ID']] = split_portfolio.PG.str.split("(", n=1, expand = True)
491
+ split_portfolio[['SG', 'SG_ID']] = split_portfolio.SG.str.split("(", n=1, expand = True)
492
+ split_portfolio[['SF', 'SF_ID']] = split_portfolio.SF.str.split("(", n=1, expand = True)
493
+ split_portfolio[['PF', 'PF_ID']] = split_portfolio.PF.str.split("(", n=1, expand = True)
494
+ split_portfolio[['C', 'C_ID']] = split_portfolio.C.str.split("(", n=1, expand = True)
495
+ split_portfolio[['G', 'G_ID']] = split_portfolio.G.str.split("(", n=1, expand = True)
496
+ split_portfolio[['F', 'F_ID']] = split_portfolio.F.str.split("(", n=1, expand = True)
497
+ split_portfolio[['UTIL', 'UTIL_ID']] = split_portfolio.UTIL.str.split("(", n=1, expand = True)
498
+
499
+ split_portfolio['PG'] = split_portfolio['PG'].str.strip()
500
+ split_portfolio['SG'] = split_portfolio['SG'].str.strip()
501
+ split_portfolio['SF'] = split_portfolio['SF'].str.strip()
502
+ split_portfolio['PF'] = split_portfolio['PF'].str.strip()
503
+ split_portfolio['C'] = split_portfolio['C'].str.strip()
504
+ split_portfolio['G'] = split_portfolio['G'].str.strip()
505
+ split_portfolio['F'] = split_portfolio['F'].str.strip()
506
+ split_portfolio['UTIL'] = split_portfolio['UTIL'].str.strip()
507
+
508
+ split_portfolio['Salary'] = sum([split_portfolio['PG'].map(player_salary_dict),
509
+ split_portfolio['SG'].map(player_salary_dict),
510
+ split_portfolio['SF'].map(player_salary_dict),
511
+ split_portfolio['PF'].map(player_salary_dict),
512
+ split_portfolio['C'].map(player_salary_dict),
513
+ split_portfolio['G'].map(player_salary_dict),
514
+ split_portfolio['F'].map(player_salary_dict),
515
+ split_portfolio['UTIL'].map(player_salary_dict)])
516
+
517
+ split_portfolio['Projection'] = sum([split_portfolio['PG'].map(player_proj_dict),
518
+ split_portfolio['SG'].map(player_proj_dict),
519
+ split_portfolio['SF'].map(player_proj_dict),
520
+ split_portfolio['PF'].map(player_proj_dict),
521
+ split_portfolio['C'].map(player_proj_dict),
522
+ split_portfolio['G'].map(player_proj_dict),
523
+ split_portfolio['F'].map(player_proj_dict),
524
+ split_portfolio['UTIL'].map(player_proj_dict)])
525
+
526
+ split_portfolio['Ownership'] = sum([split_portfolio['PG'].map(player_own_dict),
527
+ split_portfolio['SG'].map(player_own_dict),
528
+ split_portfolio['SF'].map(player_own_dict),
529
+ split_portfolio['PF'].map(player_own_dict),
530
+ split_portfolio['C'].map(player_own_dict),
531
+ split_portfolio['G'].map(player_own_dict),
532
+ split_portfolio['F'].map(player_own_dict),
533
+ split_portfolio['UTIL'].map(player_own_dict)])
534
+
535
+ st.table(split_portfolio.head(10))
536
+
537
+
538
+ except:
539
+ portfolio_dataframe.columns=['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']
540
+
541
+ split_portfolio = portfolio_dataframe
542
+ split_portfolio[['PG_ID', 'PG']] = split_portfolio.PG.str.split(":", n=1, expand = True)
543
+ split_portfolio[['SG_ID', 'SG']] = split_portfolio.SG.str.split(":", n=1, expand = True)
544
+ split_portfolio[['SF_ID', 'SF']] = split_portfolio.SF.str.split(":", n=1, expand = True)
545
+ split_portfolio[['PF_ID', 'PF']] = split_portfolio.PF.str.split(":", n=1, expand = True)
546
+ split_portfolio[['C_ID', 'C']] = split_portfolio.C.str.split(":", n=1, expand = True)
547
+ split_portfolio[['G_ID', 'G']] = split_portfolio.G.str.split(":", n=1, expand = True)
548
+ split_portfolio[['F_ID', 'F']] = split_portfolio.F.str.split(":", n=1, expand = True)
549
+ split_portfolio[['UTIL_ID', 'UTIL']] = split_portfolio.UTIL.str.split(":", n=1, expand = True)
550
+
551
+ split_portfolio['PG'] = split_portfolio['PG'].str.strip()
552
+ split_portfolio['SG'] = split_portfolio['SG'].str.strip()
553
+ split_portfolio['SF'] = split_portfolio['SF'].str.strip()
554
+ split_portfolio['PF'] = split_portfolio['PF'].str.strip()
555
+ split_portfolio['C'] = split_portfolio['C'].str.strip()
556
+ split_portfolio['G'] = split_portfolio['G'].str.strip()
557
+ split_portfolio['F'] = split_portfolio['F'].str.strip()
558
+ split_portfolio['UTIL'] = split_portfolio['UTIL'].str.strip()
559
+
560
+ split_portfolio['Salary'] = sum([split_portfolio['PG'].map(player_salary_dict),
561
+ split_portfolio['SG'].map(player_salary_dict),
562
+ split_portfolio['SF'].map(player_salary_dict),
563
+ split_portfolio['PF'].map(player_salary_dict),
564
+ split_portfolio['C'].map(player_salary_dict),
565
+ split_portfolio['G'].map(player_salary_dict),
566
+ split_portfolio['F'].map(player_salary_dict),
567
+ split_portfolio['UTIL'].map(player_salary_dict)])
568
+
569
+ split_portfolio['Projection'] = sum([split_portfolio['PG'].map(player_proj_dict),
570
+ split_portfolio['SG'].map(player_proj_dict),
571
+ split_portfolio['SF'].map(player_proj_dict),
572
+ split_portfolio['PF'].map(player_proj_dict),
573
+ split_portfolio['C'].map(player_proj_dict),
574
+ split_portfolio['G'].map(player_proj_dict),
575
+ split_portfolio['F'].map(player_proj_dict),
576
+ split_portfolio['UTIL'].map(player_proj_dict)])
577
+
578
+
579
+ split_portfolio['Ownership'] = sum([split_portfolio['PG'].map(player_own_dict),
580
+ split_portfolio['SG'].map(player_own_dict),
581
+ split_portfolio['SF'].map(player_own_dict),
582
+ split_portfolio['PF'].map(player_own_dict),
583
+ split_portfolio['C'].map(player_own_dict),
584
+ split_portfolio['G'].map(player_own_dict),
585
+ split_portfolio['F'].map(player_own_dict),
586
+ split_portfolio['UTIL'].map(player_own_dict)])
587
+
588
+ st.table(split_portfolio.head(10))
589
+
590
+ except:
591
+ split_portfolio = portfolio_dataframe
592
+
593
+ split_portfolio['Salary'] = sum([split_portfolio['PG'].map(player_salary_dict),
594
+ split_portfolio['SG'].map(player_salary_dict),
595
+ split_portfolio['SF'].map(player_salary_dict),
596
+ split_portfolio['PF'].map(player_salary_dict),
597
+ split_portfolio['C'].map(player_salary_dict),
598
+ split_portfolio['G'].map(player_salary_dict),
599
+ split_portfolio['F'].map(player_salary_dict),
600
+ split_portfolio['UTIL'].map(player_salary_dict)])
601
+
602
+ split_portfolio['Projection'] = sum([split_portfolio['PG'].map(player_proj_dict),
603
+ split_portfolio['SG'].map(player_proj_dict),
604
+ split_portfolio['SF'].map(player_proj_dict),
605
+ split_portfolio['PF'].map(player_proj_dict),
606
+ split_portfolio['C'].map(player_proj_dict),
607
+ split_portfolio['G'].map(player_proj_dict),
608
+ split_portfolio['F'].map(player_proj_dict),
609
+ split_portfolio['UTIL'].map(player_proj_dict)])
610
+
611
+
612
+ split_portfolio['Ownership'] = sum([split_portfolio['PG'].map(player_own_dict),
613
+ split_portfolio['SG'].map(player_own_dict),
614
+ split_portfolio['SF'].map(player_own_dict),
615
+ split_portfolio['PF'].map(player_own_dict),
616
+ split_portfolio['C'].map(player_own_dict),
617
+ split_portfolio['G'].map(player_own_dict),
618
+ split_portfolio['F'].map(player_own_dict),
619
+ split_portfolio['UTIL'].map(player_own_dict)])
620
+
621
+ gc.collect()
622
+
623
+ with tab2:
624
+ col1, col2 = st.columns([1, 7])
625
+ with col1:
626
+ st.info(t_stamp)
627
+ if st.button("Load/Reset Data", key='reset1'):
628
+ st.cache_resource.clear()
629
+ for key in st.session_state.keys():
630
+ del st.session_state[key]
631
+ dk_roo_raw = load_dk_player_projections()
632
+ fd_roo_raw = load_fd_player_projections()
633
+ t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
634
+ dkid_dict, fdid_dict = set_export_ids()
635
+
636
+ slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'User'))
637
+ site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'))
638
+ if site_var1 == 'Draftkings':
639
+ if slate_var1 == 'User':
640
+ raw_baselines = proj_dataframe[['Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', 'Own']]
641
+ elif slate_var1 != 'User':
642
+ raw_baselines = dk_roo_raw
643
+ elif site_var1 == 'Fanduel':
644
+ if slate_var1 == 'User':
645
+ raw_baselines = proj_dataframe[['Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', 'Own']]
646
+ elif slate_var1 != 'User':
647
+ raw_baselines = fd_roo_raw
648
+
649
+ st.info("If you are uploading a portfolio, note that there is an adjustment to projections and deviation mapping to prevent 'Projection Bias' and create a fair simulation")
650
+ insert_port1 = st.selectbox("Are you uploading a portfolio?", ('No', 'Yes'), key='insert_port1')
651
+ if insert_port1 == 'Yes':
652
+ insert_port = 1
653
+ elif insert_port1 == 'No':
654
+ insert_port = 0
655
+ contest_var1 = st.selectbox("What contest size are you simulating?", ('Small', 'Medium', 'Large'))
656
+ if contest_var1 == 'Small':
657
+ Contest_Size = 500
658
+ elif contest_var1 == 'Medium':
659
+ Contest_Size = 2500
660
+ elif contest_var1 == 'Large':
661
+ Contest_Size = 5000
662
+ strength_var1 = st.selectbox("How sharp is the field in the contest?", ('Not Very', 'Average', 'Very'))
663
+ if strength_var1 == 'Not Very':
664
+ sharp_split = .33
665
+ Strength_var = .50
666
+ scaling_var = 5
667
+ elif strength_var1 == 'Average':
668
+ sharp_split = .50
669
+ Strength_var = .25
670
+ scaling_var = 10
671
+ elif strength_var1 == 'Very':
672
+ sharp_split = .75
673
+ Strength_var = .01
674
+ scaling_var = 15
675
+
676
+ Sort_function = 'Median'
677
+ Sim_function = 'Projection'
678
+
679
+ if Contest_Size <= 1000:
680
+ strength_grow = .01
681
+ elif Contest_Size > 1000 and Contest_Size <= 2500:
682
+ strength_grow = .025
683
+ elif Contest_Size > 2500 and Contest_Size <= 5000:
684
+ strength_grow = .05
685
+ elif Contest_Size > 5000 and Contest_Size <= 20000:
686
+ strength_grow = .075
687
+ elif Contest_Size > 20000:
688
+ strength_grow = .1
689
+
690
+ field_growth = 100 * strength_grow
691
+
692
+ with col2:
693
+ with st.container():
694
+ if st.button("Simulate Contest"):
695
+ with st.container():
696
+ for key in st.session_state.keys():
697
+ del st.session_state[key]
698
+
699
+ if slate_var1 == 'User':
700
+ initial_proj = proj_dataframe[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
701
+
702
+ # Define the calculation to be applied
703
+ def calculate_own(position, own, mean_own, factor, max_own=85):
704
+ return np.where((position == 'C') & (own - mean_own >= 0),
705
+ own * (factor * (own - mean_own) / 100) + mean_own,
706
+ own)
707
+
708
+ # Set the factors based on the contest_var1
709
+ factor_c, factor_other = {
710
+ 'Small': (10, 5),
711
+ 'Medium': (6, 3),
712
+ 'Large': (3, 1.5),
713
+ }[contest_var1]
714
+
715
+ # Apply the calculation to the DataFrame
716
+ initial_proj['Own%'] = initial_proj.apply(lambda row: calculate_own(row['Position'], row['Own'], initial_proj.loc[initial_proj['Position'] == row['Position'], 'Own'].mean(), factor_c if row['Position'] == 'C' else factor_other), axis=1)
717
+ initial_proj['Own%'] = initial_proj['Own%'].clip(upper=85)
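+ # Rescale so slate-wide ownership sums to 800 (eight roster slots at 100% each).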
718
+ initial_proj['Own'] = initial_proj['Own%'] * (800 / initial_proj['Own%'].sum())
719
+
720
+ # Drop unnecessary columns and create the final DataFrame
721
+ Overall_Proj = initial_proj[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
722
+
723
+ elif slate_var1 != 'User':
724
+ # Copy only the necessary columns
725
+ initial_proj = raw_baselines[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
726
+
727
+ # Define the calculation to be applied
728
+ def calculate_own(position, own, mean_own, factor, max_own=85):
729
+ return np.where((position == 'C') & (own - mean_own >= 0),
730
+ own * (factor * (own - mean_own) / 100) + mean_own,
731
+ own)
732
+
733
+ # Set the factors based on the contest_var1
734
+ factor_c, factor_other = {
735
+ 'Small': (10, 5),
736
+ 'Medium': (6, 3),
737
+ 'Large': (3, 1.5),
738
+ }[contest_var1]
739
+
740
+ # Apply the calculation to the DataFrame
741
+ initial_proj['Own%'] = initial_proj.apply(lambda row: calculate_own(row['Position'], row['Own'], initial_proj.loc[initial_proj['Position'] == row['Position'], 'Own'].mean(), factor_c if row['Position'] == 'C' else factor_other), axis=1)
742
+ initial_proj['Own%'] = initial_proj['Own%'].clip(upper=85)
743
+ initial_proj['Own'] = initial_proj['Own%'] * (800 / initial_proj['Own%'].sum())
744
+
745
+ # Drop unnecessary columns and create the final DataFrame
746
+ Overall_Proj = initial_proj[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
747
+
748
+ if insert_port == 1:
749
+ UserPortfolio = portfolio_dataframe[['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']]
750
+ elif insert_port == 0:
751
+ UserPortfolio = pd.DataFrame(columns = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL'])
752
+
753
+ Overall_Proj.replace('', np.nan, inplace=True)
754
+ Overall_Proj = Overall_Proj.dropna(subset=['Median'])
755
+ Overall_Proj = Overall_Proj.assign(Value=lambda x: (x.Median / (x.Salary / 1000)))
756
+ Overall_Proj['Sort_var'] = (Overall_Proj['Median'].rank(ascending=False) + Overall_Proj['Value'].rank(ascending=False)) / 2
757
+ Overall_Proj = Overall_Proj.sort_values(by='Sort_var', ascending=False)
758
+ Overall_Proj['Own'] = np.where((Overall_Proj['Median'] > 0) & (Overall_Proj['Own'] == 0), 1, Overall_Proj['Own'])
759
+ Overall_Proj = Overall_Proj.loc[Overall_Proj['Own'] > 0]
760
+
761
+ Overall_Proj['Floor'] = Overall_Proj['Median'] * .25
762
+ Overall_Proj['Ceiling'] = Overall_Proj['Median'] * 1.75
763
+ Overall_Proj['STDev'] = Overall_Proj['Median'] / 4
764
+
765
+ Teams_used = Overall_Proj['Team'].drop_duplicates().reset_index(drop=True)
766
+ Teams_used = Teams_used.reset_index()
767
+ Teams_used['team_item'] = Teams_used['index'] + 1
768
+ Teams_used = Teams_used.drop(columns=['index'])
769
+ Teams_used_dictraw = Teams_used.drop(columns=['team_item'])
770
+
771
+ team_list = Teams_used['Team'].to_list()
772
+ item_list = Teams_used['team_item'].to_list()
773
+
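+ # Field strength starts from the user's Strength_var, nudged up for smaller slates;
+ # the contest-size term can push it negative, in which case it resets below.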
774
+ FieldStrength_raw = Strength_var + ((30 - len(Teams_used)) * .01)
775
+ FieldStrength = FieldStrength_raw - (FieldStrength_raw * (20000 / Contest_Size))
776
+
777
+ if FieldStrength < 0:
778
+ FieldStrength = Strength_var
779
+ field_split = Strength_var
780
+
781
+ for checkVar in range(len(team_list)):
782
+ Overall_Proj['Team'] = Overall_Proj['Team'].replace(team_list, item_list)
783
+
784
+ pgs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('PG')]
785
+ pgs_raw.dropna(subset=['Median']).reset_index(drop=True)
786
+ pgs_raw = pgs_raw.reset_index(drop=True)
787
+ pgs_raw = pgs_raw.sort_values(by=['Median'], ascending=False)
788
+
789
+ sgs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('SG')]
790
+ sgs_raw.dropna(subset=['Median']).reset_index(drop=True)
791
+ sgs_raw = sgs_raw.reset_index(drop=True)
792
+ sgs_raw = sgs_raw.sort_values(by=['Own', 'Value'], ascending=False)
793
+
794
+ sfs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('SF')]
795
+ sfs_raw.dropna(subset=['Median']).reset_index(drop=True)
796
+ sfs_raw = sfs_raw.reset_index(drop=True)
797
+ sfs_raw = sfs_raw.sort_values(by=['Own', 'Value'], ascending=False)
798
+
799
+ pfs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('PF')]
800
+ pfs_raw.dropna(subset=['Median']).reset_index(drop=True)
801
+ pfs_raw = pfs_raw.reset_index(drop=True)
802
+ pfs_raw = pfs_raw.sort_values(by=['Own', 'Median'], ascending=False)
803
+
804
+ cs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('C')]
805
+ cs_raw.dropna(subset=['Median']).reset_index(drop=True)
806
+ cs_raw = cs_raw.reset_index(drop=True)
807
+ cs_raw = cs_raw.sort_values(by=['Own', 'Median'], ascending=False)
808
+
809
+ gs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('G')]
810
+ gs_raw.dropna(subset=['Median']).reset_index(drop=True)
811
+ gs_raw = gs_raw.reset_index(drop=True)
812
+ gs_raw = gs_raw.sort_values(by=['Own', 'Value'], ascending=False)
813
+
814
+ fs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('F')]
815
+ fs_raw.dropna(subset=['Median']).reset_index(drop=True)
816
+ fs_raw = fs_raw.reset_index(drop=True)
817
+ fs_raw = fs_raw.sort_values(by=['Own', 'Value'], ascending=False)
818
+
819
+ pos_players = pd.concat([pgs_raw, sgs_raw, sfs_raw, pfs_raw, cs_raw, gs_raw, fs_raw])
820
+ pos_players.dropna(subset=['Median']).reset_index(drop=True)
821
+ pos_players = pos_players.reset_index(drop=True)
822
+
823
+ if insert_port == 1:
824
+ try:
825
+ # Initialize an empty DataFrame for Raw Portfolio
826
+ Raw_Portfolio = pd.DataFrame()
827
+
828
+ # Loop through each position and split the data accordingly
829
+ positions = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']
830
+ for pos in positions:
831
+ temp_df = UserPortfolio[pos].str.split("(", n=1, expand=True)
832
+ temp_df.columns = [pos, 'Drop']
833
+ Raw_Portfolio = pd.concat([Raw_Portfolio, temp_df], axis=1)
834
+
835
+ # Select only necessary columns and strip white spaces
836
+ CleanPortfolio = Raw_Portfolio[positions].apply(lambda x: x.str.strip())
837
+ CleanPortfolio.reset_index(inplace=True)
838
+ CleanPortfolio['User/Field'] = CleanPortfolio['index'] + 1
839
+ CleanPortfolio.drop(columns=['index'], inplace=True)
840
+
841
+ CleanPortfolio.replace('', np.nan, inplace=True)
842
+ CleanPortfolio.dropna(subset=['PG'], inplace=True)
843
+
844
+ # Create frequency table for players
845
+ cleaport_players = pd.DataFrame(
846
+ np.column_stack(np.unique(CleanPortfolio.iloc[:, 0:8].values, return_counts=True)),  # player columns only (exclude 'User/Field')
847
+ columns=['Player', 'Freq']
848
+ ).sort_values('Freq', ascending=False).reset_index(drop=True)
849
+ cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
850
+
851
+ # Merge and update nerf_frame
852
+ nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
853
+ for col in ['Median', 'Floor', 'Ceiling', 'STDev']:
854
+ nerf_frame[col] *= 0.90
855
+ except:
856
+ CleanPortfolio = UserPortfolio.reset_index()
857
+ CleanPortfolio['User/Field'] = CleanPortfolio['index'] + 1
858
+ CleanPortfolio.drop(columns=['index'], inplace=True)
859
+
860
+ CleanPortfolio.replace('', np.nan, inplace=True)
861
+ CleanPortfolio.dropna(subset=['PG'], inplace=True)
862
+
863
+ # Create frequency table for players
864
+ cleaport_players = pd.DataFrame(
865
+ np.column_stack(np.unique(CleanPortfolio.iloc[:, 0:8].values, return_counts=True)),  # player columns only (exclude 'User/Field')
866
+ columns=['Player', 'Freq']
867
+ ).sort_values('Freq', ascending=False).reset_index(drop=True)
868
+ cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
869
+
870
+ # Merge and update nerf_frame
871
+ nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
872
+ for col in ['Median', 'Floor', 'Ceiling', 'STDev']:
873
+ nerf_frame[col] *= 0.90
874
+
875
+ elif insert_port == 0:
876
+ CleanPortfolio = UserPortfolio
877
+ cleaport_players = pd.DataFrame(np.column_stack(np.unique(CleanPortfolio.iloc[:,0:8].values, return_counts=True)),
878
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
879
+ cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
880
+ nerf_frame = Overall_Proj
881
+
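+ # Player-keyed lookup dicts: maps_dict comes from the full projections; up_dict comes
+ # from nerf_frame, whose values were reduced 10% when a portfolio was uploaded.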
882
+ ref_dict = {
883
+ 'pos':['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL'],
884
+ 'pos_dfs':['PG_Table', 'SG_Table', 'SF_Table', 'PF_Table', 'C_Table', 'G_Table', 'F_Table', 'UTIL_Table'],
885
+ 'pos_dicts':['pg_dict', 'sg_dict', 'sf_dict', 'pf_dict', 'c_dict', 'g_dict', 'f_dict', 'util_dict']
886
+ }
887
+
888
+ maps_dict = {
889
+ 'Floor_map':dict(zip(Overall_Proj.Player,Overall_Proj.Floor)),
890
+ 'Projection_map':dict(zip(Overall_Proj.Player,Overall_Proj.Median)),
891
+ 'Ceiling_map':dict(zip(Overall_Proj.Player,Overall_Proj.Ceiling)),
892
+ 'Salary_map':dict(zip(Overall_Proj.Player,Overall_Proj.Salary)),
893
+ 'Pos_map':dict(zip(Overall_Proj.Player,Overall_Proj.Position)),
894
+ 'Own_map':dict(zip(Overall_Proj.Player,Overall_Proj.Own)),
895
+ 'Team_map':dict(zip(Overall_Proj.Player,Overall_Proj.Team)),
896
+ 'STDev_map':dict(zip(Overall_Proj.Player,Overall_Proj.STDev)),
897
+ 'team_check_map':dict(zip(Overall_Proj.Player,Overall_Proj.Team))
898
+ }
899
+
900
+ up_dict = {
901
+ 'Floor_map':dict(zip(cleaport_players.Player,nerf_frame.Floor)),
902
+ 'Projection_map':dict(zip(cleaport_players.Player,nerf_frame.Median)),
903
+ 'Ceiling_map':dict(zip(cleaport_players.Player,nerf_frame.Ceiling)),
904
+ 'Salary_map':dict(zip(cleaport_players.Player,nerf_frame.Salary)),
905
+ 'Pos_map':dict(zip(cleaport_players.Player,nerf_frame.Position)),
906
+ 'Own_map':dict(zip(cleaport_players.Player,nerf_frame.Own)),
907
+ 'Team_map':dict(zip(cleaport_players.Player,nerf_frame.Team)),
908
+ 'STDev_map':dict(zip(cleaport_players.Player,nerf_frame.STDev)),
909
+ 'team_check_map':dict(zip(cleaport_players.Player,nerf_frame.Team))
910
+ }
911
+
912
+ FinalPortfolio, maps_dict = run_seed_frame(5, Strength_var, strength_grow, Teams_used, 1000000, field_growth)
913
+
914
+ Sim_Winners = sim_contest(2500, FinalPortfolio, CleanPortfolio, maps_dict, up_dict, insert_port)
915
+
916
+ # Initial setup
917
+ Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners), columns=FinalPortfolio.columns.tolist() + ['Fantasy'])
918
+ Sim_Winner_Frame['GPP_Proj'] = (Sim_Winner_Frame['Projection'] + Sim_Winner_Frame['Fantasy']) / 2
919
+ Sim_Winner_Frame['unique_id'] = Sim_Winner_Frame['Projection'].astype(str) + Sim_Winner_Frame['Salary'].astype(str) + Sim_Winner_Frame['Own'].astype(str)
920
+ Sim_Winner_Frame = Sim_Winner_Frame.assign(win_count=Sim_Winner_Frame['unique_id'].map(Sim_Winner_Frame['unique_id'].value_counts()))
921
+
922
+ # Type Casting
923
+ type_cast_dict = {'Salary': int, 'Projection': np.float16, 'Fantasy': np.float16, 'GPP_Proj': np.float32}
924
+ Sim_Winner_Frame = Sim_Winner_Frame.astype(type_cast_dict)
925
+
926
+ del FinalPortfolio, insert_port, type_cast_dict
927
+
928
+ # Sorting
929
+ st.session_state.Sim_Winner_Frame = Sim_Winner_Frame.sort_values(by=['win_count', 'GPP_Proj'], ascending= [False, False]).copy().drop_duplicates(subset='unique_id').head(100)
930
+ st.session_state.Sim_Winner_Frame.drop(columns='unique_id', inplace=True)
931
+
932
+ # Data Copying
933
+ st.session_state.Sim_Winner_Export = Sim_Winner_Frame.copy()
934
+
935
+ # Data Copying
936
+ st.session_state.Sim_Winner_Display = Sim_Winner_Frame.copy()
937
+
938
+ # Conditional Replacement
939
+ columns_to_replace = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']
940
+
941
+ if site_var1 == 'Draftkings':
942
+ replace_dict = dkid_dict
943
+ elif site_var1 == 'Fanduel':
944
+ replace_dict = fdid_dict
945
+
946
+ for col in columns_to_replace:
947
+ st.session_state.Sim_Winner_Export[col].replace(replace_dict, inplace=True)
948
+
949
+ del replace_dict, Sim_Winner_Frame, Sim_Winners
950
+
951
+ st.session_state.player_freq = pd.DataFrame(np.column_stack(np.unique(st.session_state.Sim_Winner_Display.iloc[:,0:8].values, return_counts=True)),
952
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
953
+ st.session_state.player_freq['Freq'] = st.session_state.player_freq['Freq'].astype(int)
954
+ st.session_state.player_freq['Position'] = st.session_state.player_freq['Player'].map(maps_dict['Pos_map'])
955
+ st.session_state.player_freq['Salary'] = st.session_state.player_freq['Player'].map(maps_dict['Salary_map'])
956
+ st.session_state.player_freq['Proj Own'] = st.session_state.player_freq['Player'].map(maps_dict['Own_map']) / 100
957
+ st.session_state.player_freq['Exposure'] = st.session_state.player_freq['Freq']/(2500)
958
+ st.session_state.player_freq['Edge'] = st.session_state.player_freq['Exposure'] - st.session_state.player_freq['Proj Own']
959
+ st.session_state.player_freq['Team'] = st.session_state.player_freq['Player'].map(maps_dict['Team_map'])
960
+ for checkVar in range(len(team_list)):
961
+ st.session_state.player_freq['Team'] = st.session_state.player_freq['Team'].replace(item_list, team_list)
962
+
963
+ with st.container():
964
+ if 'player_freq' in st.session_state:
965
+ player_split_var2 = st.radio("Do you want to isolate lineups that include specific players?", ('Full Players', 'Specific Players'), key='player_split_var2')
966
+ if player_split_var2 == 'Specific Players':
967
+ find_var2 = st.multiselect('Which players must be included in the lineups?', options = st.session_state.player_freq['Player'].unique())
968
+ elif player_split_var2 == 'Full Players':
969
+ find_var2 = st.session_state.player_freq.Player.values.tolist()
970
+
971
+ if player_split_var2 == 'Specific Players':
972
+ st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame[np.equal.outer(st.session_state.Sim_Winner_Frame.to_numpy(), find_var2).any(axis=1).all(axis=1)]
973
+ if player_split_var2 == 'Full Players':
974
+ st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame
975
+ if 'Sim_Winner_Display' in st.session_state:
976
+ st.dataframe(st.session_state.Sim_Winner_Display.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').background_gradient(cmap='RdYlGn_r', subset=['Own']).format(precision=2), use_container_width = True)
977
+ # if 'Sim_Winner_Export' in st.session_state:
978
+ # st.download_button(
979
+ # label="Export Full Frame",
980
+ # data=st.session_state.Sim_Winner_Export.to_csv().encode('utf-8'),
981
+ # file_name='NFL_consim_export.csv',
982
+ # mime='text/csv',
983
+ # )
984
+
985
+ with st.container():
986
+ tab1 = st.tabs(['Overall Exposures'])
987
+ with tab1:
988
+ if 'player_freq' in st.session_state:
989
+ st.dataframe(st.session_state.player_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
990
+ # st.download_button(
991
+ # label="Export Exposures",
992
+ # data=st.session_state.player_freq.to_csv().encode('utf-8'),
993
+ # file_name='player_freq_export.csv',
994
+ # mime='text/csv',
995
+ # )
996
+
997
+ del gcservice_account
998
+ del dk_roo_raw, fd_roo_raw
999
+ del t_stamp
1000
+ del dkid_dict, fdid_dict
1001
+ del static_exposure, overall_exposure
1002
+ del insert_port1, Contest_Size, sharp_split, Strength_var, scaling_var, Sort_function, Sim_function, strength_grow, field_growth
1003
+ del raw_baselines
1004
+ del freq_format
1005
+
1006
+ gc.collect()