import streamlit as st

st.set_page_config(layout="wide")

# Clear any lingering module-level names from a previous Streamlit rerun.
for name in dir():
    if not name.startswith('_'):
        del globals()[name]

import numpy as np
import pandas as pd
import streamlit as st
import gspread
import random
import gc

@st.cache_resource
def init_conn():
    # Service-account credentials are supplied via Streamlit secrets (the
    # "gcp_service_account" entry below) rather than being hardcoded in source;
    # the entry should contain the standard Google service-account JSON fields
    # (type, project_id, private_key, client_email, etc.).
    credentials = dict(st.secrets["gcp_service_account"])
    gc_con = gspread.service_account_from_dict(credentials)
    return gc_con

gcservice_account = init_conn()

freq_format = {'Proj Own': '{:.2%}', 'Exposure': '{:.2%}', 'Edge': '{:.2%}'}
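# A minimal sketch of the Streamlit secrets entry assumed by init_conn() above.
# The key name "gcp_service_account" is an assumption (the original script kept
# the service-account JSON inline); fill in the values from your own key file.
#
# .streamlit/secrets.toml
# [gcp_service_account]
# type = "service_account"
# project_id = "sheets-api-connect-378620"
# private_key_id = "..."
# private_key = "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n"
# client_email = "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com"
# client_id = "..."
# token_uri = "https://oauth2.googleapis.com/token"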
@st.cache_resource(ttl=301)
def init_baselines():
    sh = gcservice_account.open_by_url('https://docs.google.com/spreadsheets/d/1Yq0vGriWK-bS79e-bD6_u9pqrYE6Yrlbb_wEkmH-ot0/edit#gid=172632260')

    worksheet = sh.worksheet('DK_Build_Up')
    load_display = pd.DataFrame(worksheet.get_all_records())
    load_display.replace('', np.nan, inplace=True)
    load_display.rename(columns={"Fantasy": "Median", 'Name': 'Player'}, inplace=True)
    load_display = load_display[load_display['Median'] > 0]
    dk_roo_raw = load_display.dropna(subset=['Median'])

    worksheet = sh.worksheet('FD_Build_Up')
    load_display = pd.DataFrame(worksheet.get_all_records())
    load_display.replace('', np.nan, inplace=True)
    load_display.rename(columns={"Fantasy": "Median", 'Nickname': 'Player'}, inplace=True)
    load_display = load_display[load_display['Median'] > 0]
    fd_roo_raw = load_display.dropna(subset=['Median'])

    worksheet = sh.worksheet('DK_Salaries')
    load_display = pd.DataFrame(worksheet.get_all_records())
    load_display.replace('', np.nan, inplace=True)
    raw_display = load_display.dropna(subset=['Median'])
    raw_display.rename(columns={"name": "Player"}, inplace=True)
    dk_ids = dict(zip(raw_display['Player'], raw_display['player_id']))

    worksheet = sh.worksheet('FD_Salaries')
    load_display = pd.DataFrame(worksheet.get_all_records())
    load_display.replace('', np.nan, inplace=True)
    raw_display = load_display.dropna(subset=['Median'])
    raw_display.rename(columns={"name": "Player"}, inplace=True)
    fd_ids = dict(zip(raw_display['Player'], raw_display['player_id']))

    worksheet = sh.worksheet('Timestamp')
    timestamp = worksheet.acell('A1').value

    return dk_roo_raw, fd_roo_raw, dk_ids, fd_ids, timestamp

dk_roo_raw, fd_roo_raw, dkid_dict, fdid_dict, timestamp = init_baselines()
t_stamp = f"Last Update: {timestamp} CST"

static_exposure = pd.DataFrame(columns=['Player', 'count'])
overall_exposure = pd.DataFrame(columns=['Player', 'count'])

# Simulate Sim_size contests: each iteration samples a Contest_Size field from the
# seed frame (plus the uploaded portfolio when one was provided), draws a normal
# score for every player from the projection/standard-deviation maps, and keeps
# the highest-scoring lineup as that contest's winner. Contest_Size is the
# module-level setting chosen in the Contest Sim tab.
def sim_contest(Sim_size, FinalPortfolio, CleanPortfolio, maps_dict, up_dict, insert_port):
    SimVar = 1
    Sim_Winners = []
    fp_array = FinalPortfolio.values

    if insert_port == 1:
        up_array = CleanPortfolio.values

    # Pre-vectorize functions
    vec_projection_map = np.vectorize(maps_dict['Projection_map'].__getitem__)
    vec_stdev_map = np.vectorize(maps_dict['STDev_map'].__getitem__)

    if insert_port == 1:
        vec_up_projection_map = np.vectorize(up_dict['Projection_map'].__getitem__)
        vec_up_stdev_map = np.vectorize(up_dict['STDev_map'].__getitem__)

    st.write('Simulating contest on frames')

    while SimVar <= Sim_size:
        if insert_port == 1:
            fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size - len(CleanPortfolio))]
        elif insert_port == 0:
            fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size)]

        # Score the eight player-name columns (0-7); the remaining columns are
        # User/Field, Salary, Projection and Own.
        sample_arrays1 = np.c_[
            fp_random,
            np.sum(np.random.normal(
                loc=vec_projection_map(fp_random[:, :8]),
                scale=vec_stdev_map(fp_random[:, :8])),
                axis=1)
        ]

        if insert_port == 1:
            sample_arrays2 = np.c_[
                up_array,
                np.sum(np.random.normal(
                    loc=vec_up_projection_map(up_array[:, :8]),
                    scale=vec_up_stdev_map(up_array[:, :8])),
                    axis=1)
            ]
            sample_arrays = np.vstack((sample_arrays1, sample_arrays2))
        else:
            sample_arrays = sample_arrays1

        final_array = sample_arrays[sample_arrays[:, 9].argsort()[::-1]]
        best_lineup = final_array[final_array[:, -1].argsort(kind='stable')[::-1][:1]]
        Sim_Winners.append(best_lineup)
        SimVar += 1

    return Sim_Winners
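# run_seed_frame (below) builds the simulated field in passes: passes 1-3 use the
# base field strength, pass 4 nudges the local FieldStrength value up based on
# how many teams are on the slate, and pass 5 sets it to 1. Each pass appends a
# correlated and an uncorrelated build, then de-duplicates on (Projection, Own)
# so the exported seed frame keeps only distinct lineups.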
def run_seed_frame(seed_depth1, Strength_var, strength_grow, Teams_used, Total_Runs, field_growth):
    RunsVar = 1
    seed_depth_def = seed_depth1
    Strength_var_def = Strength_var
    strength_grow_def = strength_grow
    Teams_used_def = Teams_used
    Total_Runs_def = Total_Runs

    st.write('Creating Seed Frames')

    while RunsVar <= seed_depth_def:
        if RunsVar <= 3:
            FieldStrength = Strength_var_def
            FinalPortfolio, maps_dict = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
            FinalPortfolio2, maps_dict2 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
            FinalPortfolio_init = pd.concat([FinalPortfolio, FinalPortfolio2], axis=0)
            maps_dict.update(maps_dict2)
        elif RunsVar > 3 and RunsVar <= 4:
            FieldStrength += (strength_grow_def + ((30 - len(Teams_used_def)) * .001))
            FinalPortfolio3, maps_dict3 = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
            FinalPortfolio4, maps_dict4 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
            FinalPortfolio_merge_3 = pd.concat([FinalPortfolio_init, FinalPortfolio3], axis=0)
            FinalPortfolio_merge_4 = pd.concat([FinalPortfolio_merge_3, FinalPortfolio4], axis=0)
            FinalPortfolio_step_2 = FinalPortfolio_merge_4.drop_duplicates(subset=['Projection', 'Own'], keep='last').reset_index(drop=True)
            maps_dict.update(maps_dict3)
            maps_dict.update(maps_dict4)
        elif RunsVar > 4:
            FieldStrength = 1
            FinalPortfolio5, maps_dict5 = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
            FinalPortfolio6, maps_dict6 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
            FinalPortfolio_merge_5 = pd.concat([FinalPortfolio_step_2, FinalPortfolio5], axis=0)
            FinalPortfolio_merge_6 = pd.concat([FinalPortfolio_merge_5, FinalPortfolio6], axis=0)
            FinalPortfolio_export = FinalPortfolio_merge_6.drop_duplicates(subset=['Projection', 'Own'], keep='last').reset_index(drop=True)
            maps_dict.update(maps_dict5)
            maps_dict.update(maps_dict6)
        RunsVar += 1

    return FinalPortfolio_export, maps_dict

def create_overall_dfs(pos_players, table_name, dict_name, pos):
    if pos == "UTIL":
        pos_players = pos_players.sort_values(by='Value', ascending=False)
        table_name_raw = pos_players.reset_index(drop=True)
        overall_table_name = table_name_raw.head(round(len(table_name_raw)))
        overall_table_name = overall_table_name.assign(Var=range(0, len(overall_table_name)))
        overall_dict_name = pd.Series(overall_table_name.Player.values, index=overall_table_name.Var).to_dict()
    elif pos != "UTIL":
        table_name_raw = pos_players[pos_players['Position'].str.contains(pos)].reset_index(drop=True)
        overall_table_name = table_name_raw.head(round(len(table_name_raw)))
        overall_table_name = overall_table_name.assign(Var=range(0, len(overall_table_name)))
        overall_dict_name = pd.Series(overall_table_name.Player.values, index=overall_table_name.Var).to_dict()

    return overall_table_name, overall_dict_name

def get_overall_merged_df():
    ref_dict = {
        'pos': ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL'],
        'pos_dfs': ['PG_Table', 'SG_Table', 'SF_Table', 'PF_Table', 'C_Table', 'G_Table', 'F_Table', 'UTIL_Table'],
        'pos_dicts': ['pg_dict', 'sg_dict', 'sf_dict', 'pf_dict', 'c_dict', 'g_dict', 'f_dict', 'util_dict']
    }

    for i in range(0, 8):
        ref_dict['pos_dfs'][i], ref_dict['pos_dicts'][i] = \
            create_overall_dfs(pos_players, ref_dict['pos_dfs'][i], ref_dict['pos_dicts'][i], ref_dict['pos'][i])

    df_out = pd.concat(ref_dict['pos_dfs'], ignore_index=True)

    return ref_dict

def calculate_range_var(count, min_val, FieldStrength, field_growth):
    var = round(len(count[0]) * FieldStrength)
    var = max(var, min_val)
    var += round(field_growth)
    return min(var, len(count[0]))
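# calculate_range_var sizes the candidate pool for each roster slot: the pool
# grows with FieldStrength and field_growth but is floored at min_val and capped
# at the number of available players. For example (illustrative numbers only):
# with 60 eligible PGs, FieldStrength 0.4, min_val 20 and field_growth 5, the
# pool is min(max(round(60 * 0.4), 20) + 5, 60) = 29.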
def create_random_portfolio(Total_Sample_Size, raw_baselines, field_growth):
    # pos_players, ref_dict, FieldStrength and maps_dict are module-level objects
    # built in the Streamlit flow before this function is called.
    full_pos_player_dict = get_overall_merged_df()
    field_growth_rounded = round(field_growth)
    ranges_dict = {}

    # Calculate ranges
    for df, dict_val, min_val, key in zip(ref_dict['pos_dfs'], ref_dict['pos_dicts'],
                                          [20, 15, 15, 20, 20, 30, 30, 50],
                                          ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']):
        count = create_overall_dfs(pos_players, df, dict_val, key)
        ranges_dict[f"{key.lower()}_range"] = calculate_range_var(count, min_val, FieldStrength, field_growth_rounded)

    # Generate random portfolios
    rng = np.random.default_rng()
    total_elements = [1, 1, 1, 1, 1, 1, 1, 1]
    keys = ['pg', 'sg', 'sf', 'pf', 'c', 'g', 'f', 'util']

    all_choices = [rng.choice(ranges_dict[f"{key}_range"], size=(Total_Sample_Size, elem))
                   for key, elem in zip(keys, total_elements)]

    RandomPortfolio = pd.DataFrame(np.hstack(all_choices), columns=['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL'])
    RandomPortfolio['User/Field'] = 0

    return RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict

def get_correlated_portfolio_for_sim(Total_Sample_Size, sharp_split, field_growth):
    sizesplit = round(Total_Sample_Size * sharp_split)

    RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines, field_growth)

    positions = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']

    # Map the random index draws back to player names for each roster slot.
    for i, pos in enumerate(positions):
        RandomPortfolio[pos] = pd.Series(list(RandomPortfolio[pos].map(full_pos_player_dict['pos_dicts'][i])),
                                         dtype="string[pyarrow]")

    # Drop lineups with duplicate players (9 unique values = 8 players + the User/Field flag).
    RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
    RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
    RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 9].drop(columns=['plyr_list', 'plyr_count']).\
        reset_index(drop=True)

    # Per-slot salary, projection and ownership columns (column order matters for
    # the positional slicing below).
    for pos in positions:
        RandomPortfolio[pos + 's'] = RandomPortfolio[pos].map(maps_dict['Salary_map']).astype(np.int32)
    for pos in positions:
        RandomPortfolio[pos + 'p'] = RandomPortfolio[pos].map(maps_dict['Projection_map']).astype(np.float16)
    for pos in positions:
        RandomPortfolio[pos + 'o'] = RandomPortfolio[pos].map(maps_dict['Own_map']).astype(np.float16)
    # Lineup totals: salary (cols 9-16), projection (17-24), ownership (25-32).
    RandomPortArray = RandomPortfolio.to_numpy()
    RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i', RandomPortArray[:, 9:17].astype(int))]
    RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i', RandomPortArray[:, 17:25].astype(np.double))]
    RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i', RandomPortArray[:, 25:33].astype(np.double))]
    RandomPortArrayOut = np.delete(RandomPortArray, np.s_[9:33], axis=1)

    RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut,
                                     columns=['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL',
                                              'User/Field', 'Salary', 'Projection', 'Own'])
    RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)

    if insert_port == 1:
        # int32 here: lineup salaries exceed the int16 range.
        CleanPortfolio['Salary'] = sum([CleanPortfolio[pos].map(maps_dict['Salary_map'])
                                        for pos in positions]).astype(np.int32)
        CleanPortfolio['Projection'] = sum([CleanPortfolio[pos].map(maps_dict['Projection_map'])
                                            for pos in positions]).astype(np.float16)
        CleanPortfolio['Own'] = sum([CleanPortfolio[pos].map(maps_dict['Own_map'])
                                     for pos in positions]).astype(np.float16)

    if site_var1 == 'Draftkings':
        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >=
                                              (49500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
    elif site_var1 == 'Fanduel':
        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >=
                                              (59500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)

    RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
    RandomPortfolio = RandomPortfolio[['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL',
                                       'User/Field', 'Salary', 'Projection', 'Own']]

    return RandomPortfolio, maps_dict
def get_uncorrelated_portfolio_for_sim(Total_Sample_Size, sharp_split, field_growth):
    # Presently identical to get_correlated_portfolio_for_sim; kept as a separate
    # function so the two branches of the seed-frame builder can diverge.
    sizesplit = round(Total_Sample_Size * sharp_split)

    RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines, field_growth)

    positions = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']

    for i, pos in enumerate(positions):
        RandomPortfolio[pos] = pd.Series(list(RandomPortfolio[pos].map(full_pos_player_dict['pos_dicts'][i])),
                                         dtype="string[pyarrow]")

    RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
    RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
    RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 9].drop(columns=['plyr_list', 'plyr_count']).\
        reset_index(drop=True)

    for pos in positions:
        RandomPortfolio[pos + 's'] = RandomPortfolio[pos].map(maps_dict['Salary_map']).astype(np.int32)
    for pos in positions:
        RandomPortfolio[pos + 'p'] = RandomPortfolio[pos].map(maps_dict['Projection_map']).astype(np.float16)
    for pos in positions:
        RandomPortfolio[pos + 'o'] = RandomPortfolio[pos].map(maps_dict['Own_map']).astype(np.float16)
    RandomPortArray = RandomPortfolio.to_numpy()
    RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i', RandomPortArray[:, 9:17].astype(int))]
    RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i', RandomPortArray[:, 17:25].astype(np.double))]
    RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i', RandomPortArray[:, 25:33].astype(np.double))]
    RandomPortArrayOut = np.delete(RandomPortArray, np.s_[9:33], axis=1)

    RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut,
                                     columns=['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL',
                                              'User/Field', 'Salary', 'Projection', 'Own'])
    RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)

    if insert_port == 1:
        CleanPortfolio['Salary'] = sum([CleanPortfolio[pos].map(maps_dict['Salary_map'])
                                        for pos in positions]).astype(np.int32)
        CleanPortfolio['Projection'] = sum([CleanPortfolio[pos].map(maps_dict['Projection_map'])
                                            for pos in positions]).astype(np.float16)
        CleanPortfolio['Own'] = sum([CleanPortfolio[pos].map(maps_dict['Own_map'])
                                     for pos in positions]).astype(np.float16)

    if site_var1 == 'Draftkings':
        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >=
                                              (49500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
    elif site_var1 == 'Fanduel':
        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >=
                                              (59500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)

    RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
    RandomPortfolio = RandomPortfolio[['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL',
                                       'User/Field', 'Salary', 'Projection', 'Own']]

    return RandomPortfolio, maps_dict

tab1, tab2 = st.tabs(['Uploads', 'Contest Sim'])

with tab1:
    st.info("The contest sim currently only works for Draftkings; the roster formation for Fanduel is incorrect. It will be fixed in the next couple of days!")
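    # Illustrative header rows for the two uploads described below (column names
    # taken from the info messages; values are placeholders, not from the app):
    #   Projections: Player,Salary,Position,Team,Opp,Median,Own
    #   Portfolio:   PG,SG,SF,PF,C,G,F,UTIL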
    with st.container():
        col1, col2 = st.columns([3, 3])

        with col1:
            st.info("The Projections file can have any columns in any order, but must contain columns explicitly named: 'Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', and 'Own'. Upload your projections first to avoid an error message.")
            proj_file = st.file_uploader("Upload Projections File", key='proj_uploader')

            if proj_file is not None:
                try:
                    proj_dataframe = pd.read_csv(proj_file)
                    proj_dataframe = proj_dataframe.dropna(subset='Median')
                    proj_dataframe['Player'] = proj_dataframe['Player'].str.strip()
                    try:
                        proj_dataframe['Own'] = proj_dataframe['Own'].str.strip('%').astype(float)
                    except:
                        pass
                except:
                    proj_dataframe = pd.read_excel(proj_file)
                    proj_dataframe = proj_dataframe.dropna(subset='Median')
                    proj_dataframe['Player'] = proj_dataframe['Player'].str.strip()
                    try:
                        proj_dataframe['Own'] = proj_dataframe['Own'].str.strip('%').astype(float)
                    except:
                        pass
                st.table(proj_dataframe.head(10))
                player_salary_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Salary))
                player_proj_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Median))
                player_own_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Own))

        with col2:
            st.info("The Portfolio file must contain only columns in order and explicitly named: 'PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', and 'UTIL'. Upload your projections first to avoid an error message.")
            portfolio_file = st.file_uploader("Upload Portfolio File", key='portfolio_uploader')

            if portfolio_file is not None:
                try:
                    portfolio_dataframe = pd.read_csv(portfolio_file)
                except:
                    portfolio_dataframe = pd.read_excel(portfolio_file)

                positions = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']

                try:
                    try:
                        # DraftKings-style export: "Player Name (ID)" in each roster slot.
                        portfolio_dataframe.columns = positions
                        split_portfolio = portfolio_dataframe
                        for pos in positions:
                            split_portfolio[[pos, pos + '_ID']] = split_portfolio[pos].str.split("(", n=1, expand=True)
                            split_portfolio[pos] = split_portfolio[pos].str.strip()

                        split_portfolio['Salary'] = sum([split_portfolio[pos].map(player_salary_dict) for pos in positions])
                        split_portfolio['Projection'] = sum([split_portfolio[pos].map(player_proj_dict) for pos in positions])
                        split_portfolio['Ownership'] = sum([split_portfolio[pos].map(player_own_dict) for pos in positions])

                        st.table(split_portfolio.head(10))

                    except:
                        # FanDuel-style export: "ID:Player Name" in each roster slot.
                        portfolio_dataframe.columns = positions
                        split_portfolio = portfolio_dataframe
                        for pos in positions:
                            split_portfolio[[pos + '_ID', pos]] = split_portfolio[pos].str.split(":", n=1, expand=True)
                            split_portfolio[pos] = split_portfolio[pos].str.strip()

                        split_portfolio['Salary'] = sum([split_portfolio[pos].map(player_salary_dict) for pos in positions])
                        split_portfolio['Projection'] = sum([split_portfolio[pos].map(player_proj_dict) for pos in positions])
                        split_portfolio['Ownership'] = sum([split_portfolio[pos].map(player_own_dict) for pos in positions])

                        st.table(split_portfolio.head(10))

                except:
                    # Plain name-only portfolio: columns already hold 'PG'..'UTIL' player names.
                    split_portfolio = portfolio_dataframe
                    split_portfolio['Salary'] = sum([split_portfolio[pos].map(player_salary_dict) for pos in positions])
                    split_portfolio['Projection'] = sum([split_portfolio[pos].map(player_proj_dict) for pos in positions])
                    split_portfolio['Ownership'] = sum([split_portfolio[pos].map(player_own_dict) for pos in positions])
    gc.collect()

with tab2:
    col1, col2 = st.columns([1, 7])

    with col1:
        st.info(t_stamp)
        if st.button("Load/Reset Data", key='reset1'):
            st.cache_data.clear()
            for key in st.session_state.keys():
                del st.session_state[key]
            dk_roo_raw, fd_roo_raw, dkid_dict, fdid_dict, timestamp = init_baselines()
            t_stamp = f"Last Update: {timestamp} CST"

        slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'User'))
        site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'))

        if site_var1 == 'Draftkings':
            if slate_var1 == 'User':
                raw_baselines = proj_dataframe[['Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', 'Own']]
            elif slate_var1 != 'User':
                raw_baselines = dk_roo_raw
        elif site_var1 == 'Fanduel':
            if slate_var1 == 'User':
                raw_baselines = proj_dataframe[['Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', 'Own']]
            elif slate_var1 != 'User':
                raw_baselines = fd_roo_raw

        st.info("If you are uploading a portfolio, note that there is an adjustment to projections and deviation mapping to prevent 'Projection Bias' and create a fair simulation")
        insert_port1 = st.selectbox("Are you uploading a portfolio?", ('No', 'Yes'), key='insert_port1')
        if insert_port1 == 'Yes':
            insert_port = 1
        elif insert_port1 == 'No':
            insert_port = 0

        contest_var1 = st.selectbox("What contest size are you simulating?", ('Small', 'Medium', 'Large'))
        if contest_var1 == 'Small':
            Contest_Size = 500
        elif contest_var1 == 'Medium':
            Contest_Size = 2500
        elif contest_var1 == 'Large':
            Contest_Size = 5000

        strength_var1 = st.selectbox("How sharp is the field in the contest?", ('Not Very', 'Average', 'Very'))
        if strength_var1 == 'Not Very':
            sharp_split = .33
            Strength_var = .50
            scaling_var = 5
        elif strength_var1 == 'Average':
            sharp_split = .50
            Strength_var = .25
            scaling_var = 10
        elif strength_var1 == 'Very':
            sharp_split = .75
            Strength_var = .01
            scaling_var = 15

        Sort_function = 'Median'
        Sim_function = 'Projection'

        if Contest_Size <= 1000:
            strength_grow = .01
        elif Contest_Size > 1000 and Contest_Size <= 2500:
            strength_grow = .025
        elif Contest_Size > 2500 and Contest_Size <= 5000:
            strength_grow = .05
        elif Contest_Size > 5000 and Contest_Size <= 20000:
            strength_grow = .075
        elif Contest_Size > 20000:
            strength_grow = .1

        field_growth = 100 * strength_grow

    with col2:
        with st.container():
            if st.button("Simulate Contest"):
                with st.container():
                    for key in st.session_state.keys():
                        del st.session_state[key]
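                    # The simulation run proceeds in stages: build the projection
                    # baseline (Overall_Proj), encode teams and field strength,
                    # optionally fold in the uploaded portfolio, seed the field
                    # with run_seed_frame, and then score it with sim_contest.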
                    if slate_var1 == 'User':
                        initial_proj = proj_dataframe[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
                    elif slate_var1 != 'User':
                        # Copy only the necessary columns
                        initial_proj = raw_baselines[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]

                    # # Define the calculation to be applied
                    # def calculate_own(position, own, mean_own, factor, max_own=85):
                    #     return np.where((position == 'C') & (own - mean_own >= 0),
                    #                     own * (factor * (own - mean_own) / 100) + mean_own,
                    #                     own)
                    # # Set the factors based on the contest_var1
                    # factor_c, factor_other = {
                    #     'Small': (10, 5),
                    #     'Medium': (6, 3),
                    #     'Large': (3, 1.5),
                    # }[contest_var1]
                    # # Apply the calculation to the DataFrame
                    # initial_proj['Own%'] = initial_proj.apply(lambda row: calculate_own(row['Position'], row['Own'],
                    #     initial_proj.loc[initial_proj['Position'] == row['Position'], 'Own'].mean(),
                    #     factor_c if row['Position'] == 'C' else factor_other), axis=1)
                    # initial_proj['Own%'] = initial_proj['Own%'].clip(upper=85)

                    # Rescale ownership so the slate sums to 900% (9 roster spots x 100%).
                    initial_proj['Own'] = initial_proj['Own'] * (900 / initial_proj['Own'].sum())

                    # Drop unnecessary columns and create the final DataFrame
                    Overall_Proj = initial_proj[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]

                    if insert_port == 1:
                        UserPortfolio = portfolio_dataframe[['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']]
                    elif insert_port == 0:
                        UserPortfolio = pd.DataFrame(columns=['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL'])

                    Overall_Proj.replace('', np.nan, inplace=True)
                    Overall_Proj = Overall_Proj.replace(',', '', regex=True)
                    Overall_Proj['Salary'] = Overall_Proj['Salary'].astype(int)
                    Overall_Proj = Overall_Proj.dropna(subset=['Median'])
                    Overall_Proj = Overall_Proj.assign(Value=lambda x: (x.Median / (x.Salary / 1000)))
                    Overall_Proj['Sort_var'] = (Overall_Proj['Median'].rank(ascending=False) + Overall_Proj['Value'].rank(ascending=False)) / 2
                    Overall_Proj = Overall_Proj.sort_values(by='Sort_var', ascending=False)
                    Overall_Proj['Own'] = np.where((Overall_Proj['Median'] > 0) & (Overall_Proj['Own'] == 0), 1, Overall_Proj['Own'])
                    Overall_Proj = Overall_Proj.loc[Overall_Proj['Own'] > 0]
                    Overall_Proj['Floor'] = Overall_Proj['Median'] * .25
                    Overall_Proj['Ceiling'] = Overall_Proj['Median'] * 1.75
                    Overall_Proj['STDev'] = Overall_Proj['Median'] / 4

                    Teams_used = Overall_Proj['Team'].drop_duplicates().reset_index(drop=True)
                    Teams_used = Teams_used.reset_index()
                    Teams_used['team_item'] = Teams_used['index'] + 1
                    Teams_used = Teams_used.drop(columns=['index'])
                    Teams_used_dictraw = Teams_used.drop(columns=['team_item'])

                    team_list = Teams_used['Team'].to_list()
                    item_list = Teams_used['team_item'].to_list()

                    FieldStrength_raw = Strength_var + ((30 - len(Teams_used)) * .01)
                    FieldStrength = FieldStrength_raw - (FieldStrength_raw * (20000 / Contest_Size))

                    if FieldStrength < 0:
                        FieldStrength = Strength_var
                    field_split = Strength_var

                    # Encode team names as integer codes (a single replace pass covers every team).
                    Overall_Proj['Team'] = Overall_Proj['Team'].replace(team_list, item_list)

                    # Build per-position pools; the sort keys differ slightly by position.
                    pos_sort_keys = {
                        'PG': ['Median'],
                        'SG': ['Own', 'Value'],
                        'SF': ['Own', 'Value'],
                        'PF': ['Own', 'Median'],
                        'C': ['Own', 'Median'],
                        'G': ['Own', 'Value'],
                        'F': ['Own', 'Value'],
                    }
                    pos_frames = []
                    for pos, sort_keys in pos_sort_keys.items():
                        pos_raw = Overall_Proj[Overall_Proj['Position'].str.contains(pos)]
                        pos_raw = pos_raw.dropna(subset=['Median']).reset_index(drop=True)
                        pos_raw = pos_raw.sort_values(by=sort_keys, ascending=False)
                        pos_frames.append(pos_raw)

                    pos_players = pd.concat(pos_frames)
                    pos_players = pos_players.dropna(subset=['Median']).reset_index(drop=True)

                    if insert_port == 1:
                        try:
                            # Initialize an empty DataFrame for Raw Portfolio
                            Raw_Portfolio = pd.DataFrame()

                            # Loop through each position and split the data accordingly
                            positions = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']
                            for pos in positions:
                                temp_df = UserPortfolio[pos].str.split("(", n=1, expand=True)
                                temp_df.columns = [pos, 'Drop']
                                Raw_Portfolio = pd.concat([Raw_Portfolio, temp_df], axis=1)

                            # Select only necessary columns and strip white spaces
                            CleanPortfolio = Raw_Portfolio[positions].apply(lambda x: x.str.strip())
                            CleanPortfolio.reset_index(inplace=True)
                            CleanPortfolio['User/Field'] = CleanPortfolio['index'] + 1
                            CleanPortfolio.drop(columns=['index'], inplace=True)

                            CleanPortfolio.replace('', np.nan, inplace=True)
                            CleanPortfolio.dropna(subset=['PG'], inplace=True)

                            # Create frequency table for players
                            cleaport_players = pd.DataFrame(
                                np.column_stack(np.unique(CleanPortfolio.iloc[:, 0:9].values, return_counts=True)),
                                columns=['Player', 'Freq']
                            ).sort_values('Freq', ascending=False).reset_index(drop=True)
                            cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)

                            # Merge and update nerf_frame
                            nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
                            for col in ['Median', 'Floor', 'Ceiling', 'STDev']:
                                nerf_frame[col] *= 0.90

                        except:
                            CleanPortfolio = UserPortfolio.reset_index()
                            CleanPortfolio['User/Field'] = CleanPortfolio['index'] + 1
                            CleanPortfolio.drop(columns=['index'], inplace=True)

                            CleanPortfolio.replace('', np.nan, inplace=True)
                            CleanPortfolio.dropna(subset=['PG'], inplace=True)
                            # Create frequency table for players
                            cleaport_players = pd.DataFrame(
                                np.column_stack(np.unique(CleanPortfolio.iloc[:, 0:9].values, return_counts=True)),
                                columns=['Player', 'Freq']
                            ).sort_values('Freq', ascending=False).reset_index(drop=True)
                            cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)

                            # Merge and update nerf_frame
                            nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
                            for col in ['Median', 'Floor', 'Ceiling', 'STDev']:
                                nerf_frame[col] *= 0.90

                    elif insert_port == 0:
                        CleanPortfolio = UserPortfolio
                        cleaport_players = pd.DataFrame(
                            np.column_stack(np.unique(CleanPortfolio.iloc[:, 0:9].values, return_counts=True)),
                            columns=['Player', 'Freq']
                        ).sort_values('Freq', ascending=False).reset_index(drop=True)
                        cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
                        nerf_frame = Overall_Proj

                    ref_dict = {
                        'pos': ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL'],
                        'pos_dfs': ['PG_Table', 'SG_Table', 'SF_Table', 'PF_Table', 'C_Table', 'G_Table', 'F_Table', 'UTIL_Table'],
                        'pos_dicts': ['pg_dict', 'sg_dict', 'sf_dict', 'pf_dict', 'c_dict', 'g_dict', 'f_dict', 'util_dict']
                    }

                    maps_dict = {
                        'Floor_map': dict(zip(Overall_Proj.Player, Overall_Proj.Floor)),
                        'Projection_map': dict(zip(Overall_Proj.Player, Overall_Proj.Median)),
                        'Ceiling_map': dict(zip(Overall_Proj.Player, Overall_Proj.Ceiling)),
                        'Salary_map': dict(zip(Overall_Proj.Player, Overall_Proj.Salary)),
                        'Pos_map': dict(zip(Overall_Proj.Player, Overall_Proj.Position)),
                        'Own_map': dict(zip(Overall_Proj.Player, Overall_Proj.Own)),
                        'Team_map': dict(zip(Overall_Proj.Player, Overall_Proj.Team)),
                        'STDev_map': dict(zip(Overall_Proj.Player, Overall_Proj.STDev)),
                        'team_check_map': dict(zip(Overall_Proj.Player, Overall_Proj.Team))
                    }

                    up_dict = {
                        'Floor_map': dict(zip(cleaport_players.Player, nerf_frame.Floor)),
                        'Projection_map': dict(zip(cleaport_players.Player, nerf_frame.Median)),
                        'Ceiling_map': dict(zip(cleaport_players.Player, nerf_frame.Ceiling)),
                        'Salary_map': dict(zip(cleaport_players.Player, nerf_frame.Salary)),
                        'Pos_map': dict(zip(cleaport_players.Player, nerf_frame.Position)),
                        'Own_map': dict(zip(cleaport_players.Player, nerf_frame.Own)),
                        'Team_map': dict(zip(cleaport_players.Player, nerf_frame.Team)),
                        'STDev_map': dict(zip(cleaport_players.Player, nerf_frame.STDev)),
                        'team_check_map': dict(zip(cleaport_players.Player, nerf_frame.Team))
                    }

                    FinalPortfolio, maps_dict = run_seed_frame(5, Strength_var, strength_grow, Teams_used, 1000000, field_growth)

                    Sim_Winners = sim_contest(2500, FinalPortfolio, CleanPortfolio, maps_dict, up_dict, insert_port)

                    # Initial setup
                    Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners), columns=FinalPortfolio.columns.tolist() + ['Fantasy'])
                    Sim_Winner_Frame['GPP_Proj'] = (Sim_Winner_Frame['Projection'] + Sim_Winner_Frame['Fantasy']) / 2
                    Sim_Winner_Frame['unique_id'] = Sim_Winner_Frame['Projection'].astype(str) + Sim_Winner_Frame['Salary'].astype(str) + Sim_Winner_Frame['Own'].astype(str)
                    Sim_Winner_Frame = Sim_Winner_Frame.assign(win_count=Sim_Winner_Frame['unique_id'].map(Sim_Winner_Frame['unique_id'].value_counts()))

                    # Type Casting
                    type_cast_dict = {'Salary': int, 'Projection': np.float16, 'Fantasy': np.float16, 'GPP_Proj': np.float32}
                    Sim_Winner_Frame = Sim_Winner_Frame.astype(type_cast_dict)

                    del FinalPortfolio, insert_port, type_cast_dict

                    # Sorting
                    st.session_state.Sim_Winner_Frame = Sim_Winner_Frame.sort_values(by=['win_count', 'GPP_Proj'], ascending=[False, False]).copy().drop_duplicates(subset='unique_id').head(100)
                    st.session_state.Sim_Winner_Frame.drop(columns='unique_id', inplace=True)

                    # Data Copying
                    st.session_state.Sim_Winner_Export = Sim_Winner_Frame.copy()
                    # Data Copying
                    st.session_state.Sim_Winner_Display = Sim_Winner_Frame.copy()

                    # Conditional Replacement: swap player names for site IDs in the export frame.
                    columns_to_replace = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']

                    if site_var1 == 'Draftkings':
                        replace_dict = dkid_dict
                    elif site_var1 == 'Fanduel':
                        replace_dict = fdid_dict

                    for col in columns_to_replace:
                        st.session_state.Sim_Winner_Export[col].replace(replace_dict, inplace=True)

                    del replace_dict, Sim_Winner_Frame, Sim_Winners

                    st.session_state.player_freq = pd.DataFrame(
                        np.column_stack(np.unique(st.session_state.Sim_Winner_Display.iloc[:, 0:8].values, return_counts=True)),
                        columns=['Player', 'Freq']
                    ).sort_values('Freq', ascending=False).reset_index(drop=True)
                    st.session_state.player_freq['Freq'] = st.session_state.player_freq['Freq'].astype(int)
                    st.session_state.player_freq['Position'] = st.session_state.player_freq['Player'].map(maps_dict['Pos_map'])
                    st.session_state.player_freq['Salary'] = st.session_state.player_freq['Player'].map(maps_dict['Salary_map'])
                    st.session_state.player_freq['Proj Own'] = st.session_state.player_freq['Player'].map(maps_dict['Own_map']) / 100
                    st.session_state.player_freq['Exposure'] = st.session_state.player_freq['Freq'] / 2500
                    st.session_state.player_freq['Edge'] = st.session_state.player_freq['Exposure'] - st.session_state.player_freq['Proj Own']
                    st.session_state.player_freq['Team'] = st.session_state.player_freq['Player'].map(maps_dict['Team_map'])
                    # Decode the integer team codes back to team names (a single replace pass suffices).
                    st.session_state.player_freq['Team'] = st.session_state.player_freq['Team'].replace(item_list, team_list)

        with st.container():
            if 'player_freq' in st.session_state:
                player_split_var2 = st.radio("Are you wanting to isolate any lineups with specific players?",
                                             ('Full Players', 'Specific Players'), key='player_split_var2')
                if player_split_var2 == 'Specific Players':
                    find_var2 = st.multiselect('Which players must be included in the lineups?',
                                               options=st.session_state.player_freq['Player'].unique())
                elif player_split_var2 == 'Full Players':
                    find_var2 = st.session_state.player_freq.Player.values.tolist()

                if player_split_var2 == 'Specific Players':
                    st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame[
                        np.equal.outer(st.session_state.Sim_Winner_Frame.to_numpy(), find_var2).any(axis=1).all(axis=1)]
                if player_split_var2 == 'Full Players':
                    st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame

            if 'Sim_Winner_Display' in st.session_state:
                st.dataframe(st.session_state.Sim_Winner_Display.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').background_gradient(cmap='RdYlGn_r', subset=['Own']).format(precision=2), use_container_width=True)

            if 'Sim_Winner_Export' in st.session_state:
                st.download_button(
                    label="Export Full Frame",
                    data=st.session_state.Sim_Winner_Export.to_csv().encode('utf-8'),
                    file_name='NBA_consim_export.csv',
                    mime='text/csv',
                )

        with st.container():
            # tab1 = st.tabs(['Overall Exposures'])
            # with tab1:
            if 'player_freq' in st.session_state:
                st.dataframe(st.session_state.player_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width=True)
                st.download_button(
                    label="Export Exposures",
                    data=st.session_state.player_freq.to_csv().encode('utf-8'),
                    file_name='player_freq_export.csv',
                    mime='text/csv',
                )

del gcservice_account
del dk_roo_raw, fd_roo_raw
del t_stamp
del dkid_dict, fdid_dict
del static_exposure, overall_exposure
del insert_port1, Contest_Size, sharp_split, Strength_var, scaling_var, Sort_function, Sim_function, strength_grow, field_growth
del raw_baselines
del freq_format

gc.collect()