import streamlit as st
st.set_page_config(layout="wide")
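# Clear any non-underscore globals left over from a previous run, then re-import dependencies.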
for name in dir():
if not name.startswith('_'):
del globals()[name]
import pulp
import numpy as np
import pandas as pd
import streamlit as st
import gspread
import time
import random
import scipy.stats
import os
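# Cached Google Sheets connection: builds a gspread client from the embedded
# service-account credentials and reuses it across reruns via st.cache_resource.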
@st.cache_resource
def init_conn():
scope = ['https://www.googleapis.com/auth/spreadsheets',
"https://www.googleapis.com/auth/drive"]
credentials = {
"type": "service_account",
"project_id": "sheets-api-connect-378620",
"private_key_id": "1005124050c80d085e2c5b344345715978dd9cc9",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCtKa01beXwc88R\nnPZVQTNPVQuBnbwoOfc66gW3547ja/UEyIGAF112dt/VqHprRafkKGmlg55jqJNt\na4zceLKV+wTm7vBu7lDISTJfGzCf2TrxQYNqwMKE2LOjI69dBM8u4Dcb4k0wcp9v\ntW1ZzLVVuwTvmrg7JBHjiSaB+x5wxm/r3FOiJDXdlAgFlytzqgcyeZMJVKKBQHyJ\njEGg/1720A0numuOCt71w/2G0bDmijuj1e6tH32MwRWcvRNZ19K9ssyDz2S9p68s\nYDhIxX69OWxwScTIHLY6J2t8txf/XMivL/636fPlDADvBEVTdlT606n8CcKUVQeq\npUVdG+lfAgMBAAECggEAP38SUA7B69eTfRpo658ycOs3Amr0JW4H/bb1rNeAul0K\nZhwd/HnU4E07y81xQmey5kN5ZeNrD5EvqkZvSyMJHV0EEahZStwhjCfnDB/cxyix\nZ+kFhv4y9eK+kFpUAhBy5nX6T0O+2T6WvzAwbmbVsZ+X8kJyPuF9m8ldcPlD0sce\ntj8NwVq1ys52eosqs7zi2vjt+eMcaY393l4ls+vNq8Yf27cfyFw45W45CH/97/Nu\n5AmuzlCOAfFF+z4OC5g4rei4E/Qgpxa7/uom+BVfv9G0DIGW/tU6Sne0+37uoGKt\nW6DzhgtebUtoYkG7ZJ05BTXGp2lwgVcNRoPwnKJDxQKBgQDT5wYPUBDW+FHbvZSp\nd1m1UQuXyerqOTA9smFaM8sr/UraeH85DJPEIEk8qsntMBVMhvD3Pw8uIUeFNMYj\naLmZFObsL+WctepXrVo5NB6RtLB/jZYxiKMatMLUJIYtcKIp+2z/YtKiWcLnwotB\nWdCjVnPTxpkurmF2fWP/eewZ+wKBgQDRMtJg7etjvKyjYNQ5fARnCc+XsI3gkBe1\nX9oeXfhyfZFeBXWnZzN1ITgFHplDznmBdxAyYGiQdbbkdKQSghviUQ0igBvoDMYy\n1rWcy+a17Mj98uyNEfmb3X2cC6WpvOZaGHwg9+GY67BThwI3FqHIbyk6Ko09WlTX\nQpRQjMzU7QKBgAfi1iflu+q0LR+3a3vvFCiaToskmZiD7latd9AKk2ocsBd3Woy9\n+hXXecJHPOKV4oUJlJgvAZqe5HGBqEoTEK0wyPNLSQlO/9ypd+0fEnArwFHO7CMF\nycQprAKHJXM1eOOFFuZeQCaInqdPZy1UcV5Szla4UmUZWkk1m24blHzXAoGBAMcA\nyH4qdbxX9AYrC1dvsSRvgcnzytMvX05LU0uF6tzGtG0zVlub4ahvpEHCfNuy44UT\nxRWW/oFFaWjjyFxO5sWggpUqNuHEnRopg3QXx22SRRTGbN45li/+QAocTkgsiRh1\nqEcYZsO4mPCsQqAy6E2p6RcK+Xa+omxvSnVhq0x1AoGAKr8GdkCl4CF6rieLMAQ7\nLNBuuoYGaHoh8l5E2uOQpzwxVy/nMBcAv+2+KqHEzHryUv1owOi6pMLv7A9mTFoS\n18B0QRLuz5fSOsVnmldfC9fpUc6H8cH1SINZpzajqQA74bPwELJjnzrCnH79TnHG\nJuElxA33rFEjbgbzdyrE768=\n-----END PRIVATE KEY-----\n",
"client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
"client_id": "106625872877651920064",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
}
gc = gspread.service_account_from_dict(credentials)
return gc
gc = init_conn()
game_format = {'Win Percentage': '{:.2%}','First Inning Lead Percentage': '{:.2%}',
'Fifth Inning Lead Percentage': '{:.2%}', '8+ runs': '{:.2%}', 'DK LevX': '{:.2%}', 'FD LevX': '{:.2%}'}
player_roo_format = {'Top_finish': '{:.2%}','Top_5_finish': '{:.2%}', 'Top_10_finish': '{:.2%}', '20+%': '{:.2%}', '2x%': '{:.2%}', '3x%': '{:.2%}',
'4x%': '{:.2%}','GPP%': '{:.2%}'}
freq_format = {'Proj Own': '{:.2%}', 'Exposure': '{:.2%}', 'Edge': '{:.2%}'}
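# Cached sheet loaders (TTL 300s): Site_Info slate/team info, Player_Projections stats,
# DK_ROO / FD_ROO projections (rows without a 'Median' are dropped), and the
# player-name -> player_id export dicts for DraftKings and FanDuel.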
@st.cache_resource(ttl = 300)
def set_slate_teams():
sh = gc.open_by_url('https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348')
worksheet = sh.worksheet('Site_Info')
raw_display = pd.DataFrame(worksheet.get_all_records())
return raw_display
@st.cache_resource(ttl = 300)
def player_stat_table():
sh = gc.open_by_url('https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348')
worksheet = sh.worksheet('Player_Projections')
raw_display = pd.DataFrame(worksheet.get_all_records())
return raw_display
@st.cache_resource(ttl = 300)
def load_dk_player_projections():
sh = gc.open_by_url('https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348')
worksheet = sh.worksheet('DK_ROO')
load_display = pd.DataFrame(worksheet.get_all_records())
load_display.replace('', np.nan, inplace=True)
raw_display = load_display.dropna(subset=['Median'])
del load_display
return raw_display
@st.cache_resource(ttl = 300)
def load_fd_player_projections():
sh = gc.open_by_url('https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348')
worksheet = sh.worksheet('FD_ROO')
load_display = pd.DataFrame(worksheet.get_all_records())
load_display.replace('', np.nan, inplace=True)
raw_display = load_display.dropna(subset=['Median'])
del load_display
return raw_display
@st.cache_resource(ttl = 300)
def set_export_ids():
sh = gc.open_by_url('https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit#gid=1391856348')
worksheet = sh.worksheet('DK_ROO')
load_display = pd.DataFrame(worksheet.get_all_records())
load_display.replace('', np.nan, inplace=True)
raw_display = load_display.dropna(subset=['Median'])
dk_ids = dict(zip(raw_display['Player'], raw_display['player_id']))
worksheet = sh.worksheet('FD_ROO')
load_display = pd.DataFrame(worksheet.get_all_records())
load_display.replace('', np.nan, inplace=True)
raw_display = load_display.dropna(subset=['Median'])
fd_ids = dict(zip(raw_display['Player'], raw_display['player_id']))
del load_display
del raw_display
return dk_ids, fd_ids
@st.cache_data
def convert_df_to_csv(df):
return df.to_csv().encode('utf-8')
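# run_seed_frame builds the simulated field in passes: the first three passes rebuild
# the portfolio at the base field strength, pass four bumps the strength and starts
# appending, and later passes run at full strength; appended passes are de-duplicated
# on Projection/Own before the combined portfolio and its lookup maps are returned.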
def run_seed_frame(seed_depth1, Strength_var, strength_grow, Teams_used, Total_Runs):
RunsVar = 1
seed_depth_def = seed_depth1
Strength_var_def = Strength_var
strength_grow_def = strength_grow
Teams_used_def = Teams_used
Total_Runs_def = Total_Runs
while RunsVar <= seed_depth_def:
if RunsVar <= 3:
FieldStrength = Strength_var_def
RandomPortfolio, maps_dict = get_correlated_portfolio_for_sim(Total_Runs_def * .1)
FinalPortfolio = RandomPortfolio
FinalPortfolio2, maps_dict2 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .1)
FinalPortfolio = pd.concat([FinalPortfolio, FinalPortfolio2], axis=0)
maps_dict.update(maps_dict2)
del FinalPortfolio2
del maps_dict2
elif RunsVar > 3 and RunsVar <= 4:
FieldStrength += (strength_grow_def + ((30 - len(Teams_used_def)) * .001))
FinalPortfolio3, maps_dict3 = get_correlated_portfolio_for_sim(Total_Runs_def * .1)
FinalPortfolio4, maps_dict4 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .1)
FinalPortfolio = pd.concat([FinalPortfolio, FinalPortfolio3], axis=0)
FinalPortfolio = pd.concat([FinalPortfolio, FinalPortfolio4], axis=0)
FinalPortfolio = FinalPortfolio.drop_duplicates(subset = ['Projection', 'Own'],keep = 'last').reset_index(drop = True)
maps_dict.update(maps_dict3)
maps_dict.update(maps_dict4)
del FinalPortfolio3
del maps_dict3
del FinalPortfolio4
del maps_dict4
elif RunsVar > 4:
FieldStrength = 1
FinalPortfolio3, maps_dict3 = get_correlated_portfolio_for_sim(Total_Runs_def * .1)
FinalPortfolio4, maps_dict4 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .1)
FinalPortfolio = pd.concat([FinalPortfolio, FinalPortfolio3], axis=0)
FinalPortfolio = pd.concat([FinalPortfolio, FinalPortfolio4], axis=0)
FinalPortfolio = FinalPortfolio.drop_duplicates(subset = ['Projection', 'Own'],keep = 'last').reset_index(drop = True)
maps_dict.update(maps_dict3)
maps_dict.update(maps_dict4)
del FinalPortfolio3
del maps_dict3
del FinalPortfolio4
del maps_dict4
RunsVar += 1
return FinalPortfolio, maps_dict
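# create_stack_options maps each team's top-projected QB to that team's wr_var-ranked
# WR (by Median), producing the QB -> WR dict used for correlated stacks.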
def create_stack_options(player_data, wr_var):
merged_frame = pd.DataFrame(columns = ['QB', 'Player'])
data_raw = player_data.sort_values(by='Median', ascending=False)
for team in data_raw['Team'].unique():
data_split = data_raw.loc[data_raw['Team'] == team]
qb_frame = data_split.loc[data_split['Position'] == 'QB'].reset_index()
wr_frame = data_split.loc[data_split['Position'] == 'WR'].iloc[wr_var-1:wr_var]
wr_frame['QB'] = qb_frame['Player'][0]
merge_slice = wr_frame[['QB', 'Player']]
merged_frame = pd.concat([merged_frame, merge_slice])
merged_frame = merged_frame.reset_index()
correl_dict = dict(zip(merged_frame.QB, merged_frame.Player))
del merged_frame
del data_raw
return correl_dict
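# create_overall_dfs returns a position table and a matching {index: Player} dict;
# the FLEX table is every positional player sorted by Value.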
def create_overall_dfs(pos_players, table_name, dict_name, pos):
if pos == "FLEX":
pos_players = pos_players.sort_values(by='Value', ascending=False)
table_name_raw = pos_players.reset_index(drop=True)
overall_table_name = table_name_raw.head(round(len(table_name_raw)))
overall_table_name = overall_table_name.assign(Var = range(0,len(overall_table_name)))
overall_dict_name = pd.Series(overall_table_name.Player.values, index=overall_table_name.Var).to_dict()
del pos_players
del table_name_raw
elif pos != "FLEX":
table_name_raw = pos_players[pos_players['Position'].str.contains(pos)].reset_index(drop=True)
overall_table_name = table_name_raw.head(round(len(table_name_raw)))
overall_table_name = overall_table_name.assign(Var = range(0,len(overall_table_name)))
overall_dict_name = pd.Series(overall_table_name.Player.values, index=overall_table_name.Var).to_dict()
del pos_players
del table_name_raw
return overall_table_name, overall_dict_name
def get_overall_merged_df():
ref_dict = {
'pos':['RB', 'WR', 'TE', 'FLEX'],
'pos_dfs':['RB_Table', 'WR_Table', 'TE_Table', 'FLEX_Table'],
'pos_dicts':['rb_dict', 'wr_dict', 'te_dict', 'flex_dict']
}
for i in range(0,4):
ref_dict['pos_dfs'][i], ref_dict['pos_dicts'][i] =\
create_overall_dfs(pos_players, ref_dict['pos_dfs'][i], ref_dict['pos_dicts'][i], ref_dict['pos'][i])
df_out = pd.concat(ref_dict['pos_dfs'], ignore_index=True)
return df_out, ref_dict
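# calculate_range_var sizes a position's draw pool: pool length scaled by FieldStrength,
# floored at min_val, grown by field_growth, and capped at the full pool size.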
def calculate_range_var(count, min_val, FieldStrength, field_growth):
var = round(len(count[0]) * FieldStrength)
var = max(var, min_val)
var += round(field_growth)
return min(var, len(count[0]))
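# create_random_portfolio draws Total_Sample_Size index-based lineups
# (1 QB, 2 RB, 3 WR, 1 TE, 1 FLEX, 1 DST) from the per-position ranges above.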
def create_random_portfolio(Total_Sample_Size, raw_baselines):
O_merge, full_pos_player_dict = get_overall_merged_df()
max_var = len(raw_baselines[raw_baselines['Position'] == 'QB'])
field_growth_rounded = round(field_growth)
ranges_dict = {}
# Calculate ranges
for df, dict_val, min_val, key in zip(ref_dict['pos_dfs'], ref_dict['pos_dicts'], [10, 20, 10, 30], ['RB', 'WR', 'TE', 'FLEX']):
count = create_overall_dfs(pos_players, df, dict_val, key)
ranges_dict[f"{key.lower()}_range"] = calculate_range_var(count, min_val, FieldStrength, field_growth_rounded)
if max_var <= 10:
ranges_dict['qb_range'] = round(max_var)
ranges_dict['dst_range'] = round(max_var)
elif max_var > 10 and max_var <= 16:
ranges_dict['qb_range'] = round(max_var / 1.5)
ranges_dict['dst_range'] = round(max_var)
elif max_var > 16:
ranges_dict['qb_range'] = round(max_var / 2)
ranges_dict['dst_range'] = round(max_var)
# Generate unique ranges
# for key, value in ranges_dict.items():
# ranges_dict[f"{key}_Uniques"] = list(range(0, value, 1))
# Generate random portfolios
rng = np.random.default_rng()
total_elements = [1, 2, 3, 1, 1, 1]
keys = ['qb', 'rb', 'wr', 'te', 'flex', 'dst']
all_choices = [rng.choice(ranges_dict[f"{key}_range"], size=(Total_Sample_Size, elem)) for key, elem in zip(keys, total_elements)]
RandomPortfolio = pd.DataFrame(np.hstack(all_choices), columns=['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST'])
RandomPortfolio['User/Field'] = 0
del O_merge
return RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict
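# Correlated field lineups: map drawn indices back to player names, force WR1 to the
# drawn QB's stack partner, drop lineups with duplicate players, attach salary /
# projection / ownership totals, then apply the site salary window and sort by the
# simulation metric.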
def get_correlated_portfolio_for_sim(Total_Sample_Size):
sizesplit = round(Total_Sample_Size * sharp_split)
RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines)
stack_num = random.randint(1, 3)
stacking_dict = create_stack_options(raw_baselines, stack_num)
# # Create a dictionary for mapping positions to their corresponding dictionaries
# dict_map = {
# 'QB': qb_dict,
# 'RB1': full_pos_player_dict['pos_dicts'][0],
# 'RB2': full_pos_player_dict['pos_dicts'][0],
# 'WR1': full_pos_player_dict['pos_dicts'][1],
# 'WR2': full_pos_player_dict['pos_dicts'][1],
# 'WR3': full_pos_player_dict['pos_dicts'][1],
# 'TE': full_pos_player_dict['pos_dicts'][2],
# 'FLEX': full_pos_player_dict['pos_dicts'][3],
# 'DST': def_dict
# }
# # Apply mapping for each position
# for pos, mapping in dict_map.items():
# RandomPortfolio[pos] = RandomPortfolio[pos].map(mapping).astype("string[pyarrow]")
# # This part appears to be for filtering. Consider if it can be optimized depending on the data characteristics
# RandomPortfolio['plyr_list'] = RandomPortfolio.values.tolist()
# RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
# RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 10].drop(columns=['plyr_list','plyr_count']).reset_index(drop=True)
RandomPortfolio['QB'] = pd.Series(list(RandomPortfolio['QB'].map(qb_dict)), dtype="string[pyarrow]")
RandomPortfolio['RB1'] = pd.Series(list(RandomPortfolio['RB1'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
RandomPortfolio['RB2'] = pd.Series(list(RandomPortfolio['RB2'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
RandomPortfolio['WR1'] = pd.Series(list(RandomPortfolio['QB'].map(stacking_dict)), dtype="string[pyarrow]")
RandomPortfolio['WR2'] = pd.Series(list(RandomPortfolio['WR2'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
RandomPortfolio['WR3'] = pd.Series(list(RandomPortfolio['WR3'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
RandomPortfolio['TE'] = pd.Series(list(RandomPortfolio['TE'].map(full_pos_player_dict['pos_dicts'][2])), dtype="string[pyarrow]")
RandomPortfolio['FLEX'] = pd.Series(list(RandomPortfolio['FLEX'].map(full_pos_player_dict['pos_dicts'][3])), dtype="string[pyarrow]")
RandomPortfolio['DST'] = pd.Series(list(RandomPortfolio['DST'].map(def_dict)), dtype="string[pyarrow]")
RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 10].drop(columns=['plyr_list','plyr_count']).\
reset_index(drop=True)
del sizesplit
del full_pos_player_dict
del ranges_dict
del stack_num
del stacking_dict
RandomPortfolio['QBs'] = RandomPortfolio['QB'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['RB1s'] = RandomPortfolio['RB1'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['RB2s'] = RandomPortfolio['RB2'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['WR1s'] = RandomPortfolio['WR1'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['WR2s'] = RandomPortfolio['WR2'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['WR3s'] = RandomPortfolio['WR3'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['TEs'] = RandomPortfolio['TE'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['FLEXs'] = RandomPortfolio['FLEX'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['DSTs'] = RandomPortfolio['DST'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['QBp'] = RandomPortfolio['QB'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['RB1p'] = RandomPortfolio['RB1'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['RB2p'] = RandomPortfolio['RB2'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['WR1p'] = RandomPortfolio['WR1'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['WR2p'] = RandomPortfolio['WR2'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['WR3p'] = RandomPortfolio['WR3'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['TEp'] = RandomPortfolio['TE'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['FLEXp'] = RandomPortfolio['FLEX'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['DSTp'] = RandomPortfolio['DST'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['QBo'] = RandomPortfolio['QB'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['RB1o'] = RandomPortfolio['RB1'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['RB2o'] = RandomPortfolio['RB2'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['WR1o'] = RandomPortfolio['WR1'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['WR2o'] = RandomPortfolio['WR2'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['WR3o'] = RandomPortfolio['WR3'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['TEo'] = RandomPortfolio['TE'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['FLEXo'] = RandomPortfolio['FLEX'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['DSTo'] = RandomPortfolio['DST'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortArray = RandomPortfolio.to_numpy()
del RandomPortfolio
RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,10:19].astype(int))]
RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,19:28].astype(np.double))]
RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,28:37].astype(np.double))]
RandomPortArrayOut = np.delete(RandomPortArray, np.s_[10:37], axis=1)
RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut, columns = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own'])
RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
del RandomPortArray
del RandomPortArrayOut
if insert_port == 1:
CleanPortfolio['Salary'] = sum([CleanPortfolio['QB'].map(maps_dict['Salary_map']),
CleanPortfolio['RB1'].map(maps_dict['Salary_map']),
CleanPortfolio['RB2'].map(maps_dict['Salary_map']),
CleanPortfolio['WR1'].map(maps_dict['Salary_map']),
CleanPortfolio['WR2'].map(maps_dict['Salary_map']),
CleanPortfolio['WR3'].map(maps_dict['Salary_map']),
CleanPortfolio['TE'].map(maps_dict['Salary_map']),
CleanPortfolio['FLEX'].map(maps_dict['Salary_map']),
CleanPortfolio['DST'].map(maps_dict['Salary_map'])
]).astype(np.int32)
if insert_port == 1:
CleanPortfolio['Projection'] = sum([CleanPortfolio['QB'].map(up_dict['Projection_map']),
CleanPortfolio['RB1'].map(up_dict['Projection_map']),
CleanPortfolio['RB2'].map(up_dict['Projection_map']),
CleanPortfolio['WR1'].map(up_dict['Projection_map']),
CleanPortfolio['WR2'].map(up_dict['Projection_map']),
CleanPortfolio['WR3'].map(up_dict['Projection_map']),
CleanPortfolio['TE'].map(up_dict['Projection_map']),
CleanPortfolio['FLEX'].map(up_dict['Projection_map']),
CleanPortfolio['DST'].map(up_dict['Projection_map'])
]).astype(np.float16)
if insert_port == 1:
CleanPortfolio['Own'] = sum([CleanPortfolio['QB'].map(maps_dict['Own_map']),
CleanPortfolio['RB1'].map(maps_dict['Own_map']),
CleanPortfolio['RB2'].map(maps_dict['Own_map']),
CleanPortfolio['WR1'].map(maps_dict['Own_map']),
CleanPortfolio['WR2'].map(maps_dict['Own_map']),
CleanPortfolio['WR3'].map(maps_dict['Own_map']),
CleanPortfolio['TE'].map(maps_dict['Own_map']),
CleanPortfolio['FLEX'].map(maps_dict['Own_map']),
CleanPortfolio['DST'].map(maps_dict['Own_map'])
]).astype(np.float16)
if site_var1 == 'Draftkings':
RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (49500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
elif site_var1 == 'Fanduel':
RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (59500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
RandomPortfolio = RandomPortfolio[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own']]
return RandomPortfolio, maps_dict
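# Uncorrelated field lineups: the same pipeline as above, but every slot (including WR1)
# is drawn independently with no forced QB stack.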
def get_uncorrelated_portfolio_for_sim(Total_Sample_Size):
sizesplit = round(Total_Sample_Size * (1-sharp_split))
RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines)
RandomPortfolio['QB'] = pd.Series(list(RandomPortfolio['QB'].map(qb_dict)), dtype="string[pyarrow]")
RandomPortfolio['RB1'] = pd.Series(list(RandomPortfolio['RB1'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
RandomPortfolio['RB2'] = pd.Series(list(RandomPortfolio['RB2'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
RandomPortfolio['WR1'] = pd.Series(list(RandomPortfolio['WR1'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
RandomPortfolio['WR2'] = pd.Series(list(RandomPortfolio['WR2'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
RandomPortfolio['WR3'] = pd.Series(list(RandomPortfolio['WR3'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
RandomPortfolio['TE'] = pd.Series(list(RandomPortfolio['TE'].map(full_pos_player_dict['pos_dicts'][2])), dtype="string[pyarrow]")
RandomPortfolio['FLEX'] = pd.Series(list(RandomPortfolio['FLEX'].map(full_pos_player_dict['pos_dicts'][3])), dtype="string[pyarrow]")
RandomPortfolio['DST'] = pd.Series(list(RandomPortfolio['DST'].map(def_dict)), dtype="string[pyarrow]")
RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 10].drop(columns=['plyr_list','plyr_count']).\
reset_index(drop=True)
del sizesplit
del full_pos_player_dict
del ranges_dict
RandomPortfolio['QBs'] = RandomPortfolio['QB'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['RB1s'] = RandomPortfolio['RB1'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['RB2s'] = RandomPortfolio['RB2'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['WR1s'] = RandomPortfolio['WR1'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['WR2s'] = RandomPortfolio['WR2'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['WR3s'] = RandomPortfolio['WR3'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['TEs'] = RandomPortfolio['TE'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['FLEXs'] = RandomPortfolio['FLEX'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['DSTs'] = RandomPortfolio['DST'].map(maps_dict['Salary_map']).astype(np.int32)
RandomPortfolio['QBp'] = RandomPortfolio['QB'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['RB1p'] = RandomPortfolio['RB1'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['RB2p'] = RandomPortfolio['RB2'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['WR1p'] = RandomPortfolio['WR1'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['WR2p'] = RandomPortfolio['WR2'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['WR3p'] = RandomPortfolio['WR3'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['TEp'] = RandomPortfolio['TE'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['FLEXp'] = RandomPortfolio['FLEX'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['DSTp'] = RandomPortfolio['DST'].map(maps_dict['Projection_map']).astype(np.float16)
RandomPortfolio['QBo'] = RandomPortfolio['QB'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['RB1o'] = RandomPortfolio['RB1'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['RB2o'] = RandomPortfolio['RB2'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['WR1o'] = RandomPortfolio['WR1'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['WR2o'] = RandomPortfolio['WR2'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['WR3o'] = RandomPortfolio['WR3'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['TEo'] = RandomPortfolio['TE'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['FLEXo'] = RandomPortfolio['FLEX'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortfolio['DSTo'] = RandomPortfolio['DST'].map(maps_dict['Own_map']).astype(np.float16)
RandomPortArray = RandomPortfolio.to_numpy()
del RandomPortfolio
RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,10:19].astype(int))]
RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,19:28].astype(np.double))]
RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,28:37].astype(np.double))]
RandomPortArrayOut = np.delete(RandomPortArray, np.s_[10:37], axis=1)
RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut, columns = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own'])
RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
del RandomPortArray
del RandomPortArrayOut
# st.table(RandomPortfolioDF.head(50))
if insert_port == 1:
CleanPortfolio['Salary'] = sum([CleanPortfolio['QB'].map(maps_dict['Salary_map']),
CleanPortfolio['RB1'].map(maps_dict['Salary_map']),
CleanPortfolio['RB2'].map(maps_dict['Salary_map']),
CleanPortfolio['WR1'].map(maps_dict['Salary_map']),
CleanPortfolio['WR2'].map(maps_dict['Salary_map']),
CleanPortfolio['WR3'].map(maps_dict['Salary_map']),
CleanPortfolio['TE'].map(maps_dict['Salary_map']),
CleanPortfolio['FLEX'].map(maps_dict['Salary_map']),
CleanPortfolio['DST'].map(maps_dict['Salary_map'])
]).astype(np.int32)
if insert_port == 1:
CleanPortfolio['Projection'] = sum([CleanPortfolio['QB'].map(up_dict['Projection_map']),
CleanPortfolio['RB1'].map(up_dict['Projection_map']),
CleanPortfolio['RB2'].map(up_dict['Projection_map']),
CleanPortfolio['WR1'].map(up_dict['Projection_map']),
CleanPortfolio['WR2'].map(up_dict['Projection_map']),
CleanPortfolio['WR3'].map(up_dict['Projection_map']),
CleanPortfolio['TE'].map(up_dict['Projection_map']),
CleanPortfolio['FLEX'].map(up_dict['Projection_map']),
CleanPortfolio['DST'].map(up_dict['Projection_map'])
]).astype(np.float16)
if insert_port == 1:
CleanPortfolio['Own'] = sum([CleanPortfolio['QB'].map(maps_dict['Own_map']),
CleanPortfolio['RB1'].map(maps_dict['Own_map']),
CleanPortfolio['RB2'].map(maps_dict['Own_map']),
CleanPortfolio['WR1'].map(maps_dict['Own_map']),
CleanPortfolio['WR2'].map(maps_dict['Own_map']),
CleanPortfolio['WR3'].map(maps_dict['Own_map']),
CleanPortfolio['TE'].map(maps_dict['Own_map']),
CleanPortfolio['FLEX'].map(maps_dict['Own_map']),
CleanPortfolio['DST'].map(maps_dict['Own_map'])
]).astype(np.float16)
if site_var1 == 'Draftkings':
RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (49500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
elif site_var1 == 'Fanduel':
RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (59500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
RandomPortfolio = RandomPortfolio[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own']]
return RandomPortfolio, maps_dict
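# --- Module-level data pulls and page setup ---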
player_stats = player_stat_table()
dk_roo_raw = load_dk_player_projections()
fd_roo_raw = load_fd_player_projections()
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
site_slates = set_slate_teams()
dkid_dict, fdid_dict = set_export_ids()
static_exposure = pd.DataFrame(columns=['Player', 'count'])
overall_exposure = pd.DataFrame(columns=['Player', 'count'])
tab1, tab2 = st.tabs(['Uploads', 'Contest Sim'])
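# Tab 1 (Uploads): parse the user's projections and portfolio files, build player
# lookup dicts, tag each lineup's main stack, and render exposure and portfolio views.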
with tab1:
with st.container():
col1, col2 = st.columns([3, 3])
with col1:
st.info("The Projections file can have any columns in any order, but must contain columns explicitly named: 'Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', and 'Own'. Upload your projections first to avoid an error message.")
proj_file = st.file_uploader("Upload Projections File", key = 'proj_uploader')
if proj_file is not None:
try:
proj_dataframe = pd.read_csv(proj_file)
proj_dataframe = proj_dataframe.dropna(subset='Median')
proj_dataframe['Player'] = proj_dataframe['Player'].str.strip()
try:
proj_dataframe['Own'] = proj_dataframe['Own'].str.strip('%').astype(float)
except:
pass
except:
proj_dataframe = pd.read_excel(proj_file)
proj_dataframe = proj_dataframe.dropna(subset='Median')
proj_dataframe['Player'] = proj_dataframe['Player'].str.strip()
try:
proj_dataframe['Own'] = proj_dataframe['Own'].str.strip('%').astype(float)
except:
pass
st.table(proj_dataframe.head(10))
player_salary_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Salary))
player_proj_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Median))
player_own_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Own))
player_team_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Team))
with col2:
st.info("The Portfolio file must contain only columns in order and explicitly named: 'QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', and 'DST'. Upload your projections first to avoid an error message.")
portfolio_file = st.file_uploader("Upload Portfolio File", key = 'portfolio_uploader')
if portfolio_file is not None:
try:
portfolio_dataframe = pd.read_csv(portfolio_file)
except:
portfolio_dataframe = pd.read_excel(portfolio_file)
try:
try:
portfolio_dataframe.columns=["QB", "RB1", "RB2", "WR1", "WR2", "WR3", "TE", "FLEX", "DST"]
split_portfolio = portfolio_dataframe
split_portfolio[['QB', 'QB_ID']] = split_portfolio.QB.str.split("(", n=1, expand = True)
split_portfolio[['RB1', 'RB1_ID']] = split_portfolio.RB1.str.split("(", n=1, expand = True)
split_portfolio[['RB2', 'RB2_ID']] = split_portfolio.RB2.str.split("(", n=1, expand = True)
split_portfolio[['WR1', 'WR1_ID']] = split_portfolio.WR1.str.split("(", n=1, expand = True)
split_portfolio[['WR2', 'WR2_ID']] = split_portfolio.WR2.str.split("(", n=1, expand = True)
split_portfolio[['WR3', 'WR3_ID']] = split_portfolio.WR3.str.split("(", n=1, expand = True)
split_portfolio[['TE', 'TE_ID']] = split_portfolio.TE.str.split("(", n=1, expand = True)
split_portfolio[['FLEX', 'FLEX_ID']] = split_portfolio.FLEX.str.split("(", n=1, expand = True)
split_portfolio[['DST', 'DST_ID']] = split_portfolio.DST.str.split("(", n=1, expand = True)
split_portfolio['QB'] = split_portfolio['QB'].str.strip()
split_portfolio['RB1'] = split_portfolio['RB1'].str.strip()
split_portfolio['RB2'] = split_portfolio['RB2'].str.strip()
split_portfolio['WR1'] = split_portfolio['WR1'].str.strip()
split_portfolio['WR2'] = split_portfolio['WR2'].str.strip()
split_portfolio['WR3'] = split_portfolio['WR3'].str.strip()
split_portfolio['TE'] = split_portfolio['TE'].str.strip()
split_portfolio['FLEX'] = split_portfolio['FLEX'].str.strip()
split_portfolio['DST'] = split_portfolio['DST'].str.strip()
st.table(split_portfolio.head(10))
split_portfolio['Salary'] = sum([split_portfolio['QB'].map(player_salary_dict),
split_portfolio['RB1'].map(player_salary_dict),
split_portfolio['RB2'].map(player_salary_dict),
split_portfolio['WR1'].map(player_salary_dict),
split_portfolio['WR2'].map(player_salary_dict),
split_portfolio['WR3'].map(player_salary_dict),
split_portfolio['TE'].map(player_salary_dict),
split_portfolio['FLEX'].map(player_salary_dict),
split_portfolio['DST'].map(player_salary_dict)])
split_portfolio['Projection'] = sum([split_portfolio['QB'].map(player_proj_dict),
split_portfolio['RB1'].map(player_proj_dict),
split_portfolio['RB2'].map(player_proj_dict),
split_portfolio['WR1'].map(player_proj_dict),
split_portfolio['WR2'].map(player_proj_dict),
split_portfolio['WR3'].map(player_proj_dict),
split_portfolio['TE'].map(player_proj_dict),
split_portfolio['FLEX'].map(player_proj_dict),
split_portfolio['DST'].map(player_proj_dict)])
split_portfolio['Ownership'] = sum([split_portfolio['QB'].map(player_own_dict),
split_portfolio['RB1'].map(player_own_dict),
split_portfolio['RB2'].map(player_own_dict),
split_portfolio['WR1'].map(player_own_dict),
split_portfolio['WR2'].map(player_own_dict),
split_portfolio['WR3'].map(player_own_dict),
split_portfolio['TE'].map(player_own_dict),
split_portfolio['FLEX'].map(player_own_dict),
split_portfolio['DST'].map(player_own_dict)])
split_portfolio['QB_team'] = split_portfolio['QB'].map(player_team_dict)
split_portfolio['RB1_team'] = split_portfolio['RB1'].map(player_team_dict)
split_portfolio['RB2_team'] = split_portfolio['RB2'].map(player_team_dict)
split_portfolio['WR1_team'] = split_portfolio['WR1'].map(player_team_dict)
split_portfolio['WR2_team'] = split_portfolio['WR2'].map(player_team_dict)
split_portfolio['WR3_team'] = split_portfolio['WR3'].map(player_team_dict)
split_portfolio['TE_team'] = split_portfolio['TE'].map(player_team_dict)
split_portfolio['FLEX_team'] = split_portfolio['FLEX'].map(player_team_dict)
split_portfolio['DST_team'] = split_portfolio['DST'].map(player_team_dict)
split_portfolio = split_portfolio[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'Salary', 'Projection', 'Ownership', 'QB_team',
'RB1_team', 'RB2_team', 'WR1_team', 'WR2_team', 'WR3_team', 'TE_team', 'FLEX_team', 'DST_team']]
split_portfolio['Main_Stack'] = split_portfolio.iloc[:, 15:19].apply(lambda row: row.value_counts().nlargest(2).index[0],axis=1)
split_portfolio['Main_Stack_Size'] = split_portfolio.iloc[:, 15:19].apply(lambda row: row.value_counts().nlargest(2).values[0],axis=1)
split_portfolio['Main_Stack_Size'] = split_portfolio['Main_Stack_Size'] - 1
except:
portfolio_dataframe.columns=["QB", "RB1", "RB2", "WR1", "WR2", "WR3", "TE", "FLEX", "DST"]
split_portfolio = portfolio_dataframe
split_portfolio[['QB_ID', 'QB']] = split_portfolio.QB.str.split(":", n=1, expand = True)
split_portfolio[['RB1_ID', 'RB1']] = split_portfolio.RB1.str.split(":", n=1, expand = True)
split_portfolio[['RB2_ID', 'RB2']] = split_portfolio.RB2.str.split(":", n=1, expand = True)
split_portfolio[['WR1_ID', 'WR1']] = split_portfolio.WR1.str.split(":", n=1, expand = True)
split_portfolio[['WR2_ID', 'WR2']] = split_portfolio.WR2.str.split(":", n=1, expand = True)
split_portfolio[['WR3_ID', 'WR3']] = split_portfolio.WR3.str.split(":", n=1, expand = True)
split_portfolio[['TE_ID', 'TE']] = split_portfolio.TE.str.split(":", n=1, expand = True)
split_portfolio[['FLEX_ID', 'FLEX']] = split_portfolio.FLEX.str.split(":", n=1, expand = True)
split_portfolio[['DST_ID', 'DST']] = split_portfolio.DST.str.split(":", n=1, expand = True)
split_portfolio['QB'] = split_portfolio['QB'].str.strip()
split_portfolio['RB1'] = split_portfolio['RB1'].str.strip()
split_portfolio['RB2'] = split_portfolio['RB2'].str.strip()
split_portfolio['WR1'] = split_portfolio['WR1'].str.strip()
split_portfolio['WR2'] = split_portfolio['WR2'].str.strip()
split_portfolio['WR3'] = split_portfolio['WR3'].str.strip()
split_portfolio['TE'] = split_portfolio['TE'].str.strip()
split_portfolio['FLEX'] = split_portfolio['FLEX'].str.strip()
split_portfolio['DST'] = split_portfolio['DST'].str.strip()
split_portfolio['Salary'] = sum([split_portfolio['QB'].map(player_salary_dict),
split_portfolio['RB1'].map(player_salary_dict),
split_portfolio['RB2'].map(player_salary_dict),
split_portfolio['WR1'].map(player_salary_dict),
split_portfolio['WR2'].map(player_salary_dict),
split_portfolio['WR3'].map(player_salary_dict),
split_portfolio['TE'].map(player_salary_dict),
split_portfolio['FLEX'].map(player_salary_dict),
split_portfolio['DST'].map(player_salary_dict)])
split_portfolio['Projection'] = sum([split_portfolio['QB'].map(player_proj_dict),
split_portfolio['RB1'].map(player_proj_dict),
split_portfolio['RB2'].map(player_proj_dict),
split_portfolio['WR1'].map(player_proj_dict),
split_portfolio['WR2'].map(player_proj_dict),
split_portfolio['WR3'].map(player_proj_dict),
split_portfolio['TE'].map(player_proj_dict),
split_portfolio['FLEX'].map(player_proj_dict),
split_portfolio['DST'].map(player_proj_dict)])
st.table(split_portfolio.head(10))
split_portfolio['Ownership'] = sum([split_portfolio['QB'].map(player_own_dict),
split_portfolio['RB1'].map(player_own_dict),
split_portfolio['RB2'].map(player_own_dict),
split_portfolio['WR1'].map(player_own_dict),
split_portfolio['WR2'].map(player_own_dict),
split_portfolio['WR3'].map(player_own_dict),
split_portfolio['TE'].map(player_own_dict),
split_portfolio['FLEX'].map(player_own_dict),
split_portfolio['DST'].map(player_own_dict)])
split_portfolio['QB_team'] = split_portfolio['QB'].map(player_team_dict)
split_portfolio['RB1_team'] = split_portfolio['RB1'].map(player_team_dict)
split_portfolio['RB2_team'] = split_portfolio['RB2'].map(player_team_dict)
split_portfolio['WR1_team'] = split_portfolio['WR1'].map(player_team_dict)
split_portfolio['WR2_team'] = split_portfolio['WR2'].map(player_team_dict)
split_portfolio['WR3_team'] = split_portfolio['WR3'].map(player_team_dict)
split_portfolio['TE_team'] = split_portfolio['TE'].map(player_team_dict)
split_portfolio['FLEX_team'] = split_portfolio['FLEX'].map(player_team_dict)
split_portfolio['DST_team'] = split_portfolio['DST'].map(player_team_dict)
split_portfolio = split_portfolio[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'Salary', 'Projection', 'Ownership', 'QB_team',
'RB1_team', 'RB2_team', 'WR1_team', 'WR2_team', 'WR3_team', 'TE_team', 'FLEX_team', 'DST_team']]
split_portfolio['Main_Stack'] = split_portfolio.iloc[:, 15:19].apply(lambda row: row.value_counts().nlargest(2).index[0],axis=1)
split_portfolio['Main_Stack_Size'] = split_portfolio.iloc[:, 15:19].apply(lambda row: row.value_counts().nlargest(2).values[0],axis=1)
split_portfolio['Main_Stack_Size'] = split_portfolio['Main_Stack_Size'] - 1
except:
split_portfolio = portfolio_dataframe
split_portfolio['Salary'] = sum([split_portfolio['QB'].map(player_salary_dict),
split_portfolio['RB1'].map(player_salary_dict),
split_portfolio['RB2'].map(player_salary_dict),
split_portfolio['WR1'].map(player_salary_dict),
split_portfolio['WR2'].map(player_salary_dict),
split_portfolio['WR3'].map(player_salary_dict),
split_portfolio['TE'].map(player_salary_dict),
split_portfolio['FLEX'].map(player_salary_dict),
split_portfolio['DST'].map(player_salary_dict)])
split_portfolio['Projection'] = sum([split_portfolio['QB'].map(player_proj_dict),
split_portfolio['RB1'].map(player_proj_dict),
split_portfolio['RB2'].map(player_proj_dict),
split_portfolio['WR1'].map(player_proj_dict),
split_portfolio['WR2'].map(player_proj_dict),
split_portfolio['WR3'].map(player_proj_dict),
split_portfolio['TE'].map(player_proj_dict),
split_portfolio['FLEX'].map(player_proj_dict),
split_portfolio['DST'].map(player_proj_dict)])
split_portfolio['Ownership'] = sum([split_portfolio['QB'].map(player_own_dict),
split_portfolio['RB1'].map(player_own_dict),
split_portfolio['RB2'].map(player_own_dict),
split_portfolio['WR1'].map(player_own_dict),
split_portfolio['WR2'].map(player_own_dict),
split_portfolio['WR3'].map(player_own_dict),
split_portfolio['TE'].map(player_own_dict),
split_portfolio['FLEX'].map(player_own_dict),
split_portfolio['DST'].map(player_own_dict)])
split_portfolio['QB_team'] = split_portfolio['QB'].map(player_team_dict)
split_portfolio['RB1_team'] = split_portfolio['RB1'].map(player_team_dict)
split_portfolio['RB2_team'] = split_portfolio['RB2'].map(player_team_dict)
split_portfolio['WR1_team'] = split_portfolio['WR1'].map(player_team_dict)
split_portfolio['WR2_team'] = split_portfolio['WR2'].map(player_team_dict)
split_portfolio['WR3_team'] = split_portfolio['WR3'].map(player_team_dict)
split_portfolio['TE_team'] = split_portfolio['TE'].map(player_team_dict)
split_portfolio['FLEX_team'] = split_portfolio['FLEX'].map(player_team_dict)
split_portfolio['DST_team'] = split_portfolio['DST'].map(player_team_dict)
split_portfolio = split_portfolio[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'Salary', 'Projection', 'Ownership', 'QB_team',
'RB1_team', 'RB2_team', 'WR1_team', 'WR2_team', 'WR3_team', 'TE_team', 'FLEX_team', 'DST_team']]
split_portfolio['Main_Stack'] = split_portfolio.iloc[:, 15:19].apply(lambda row: row.value_counts().nlargest(2).index[0],axis=1)
split_portfolio['Main_Stack_Size'] = split_portfolio.iloc[:, 15:19].apply(lambda row: row.value_counts().nlargest(2).values[0],axis=1)
split_portfolio['Main_Stack_Size'] = split_portfolio['Main_Stack_Size'] - 1
for player_cols in split_portfolio.iloc[:, :9]:
static_col_raw = split_portfolio[player_cols].value_counts()
static_col = static_col_raw.to_frame()
static_col.reset_index(inplace=True)
static_col.columns = ['Player', 'count']
static_exposure = pd.concat([static_exposure, static_col], ignore_index=True)
static_exposure['Exposure'] = static_exposure['count'] / len(split_portfolio)
static_exposure = static_exposure[['Player', 'Exposure']]
del player_salary_dict
del player_proj_dict
del player_own_dict
del player_team_dict
del static_col_raw
del static_col
with st.container():
col1, col2 = st.columns([3, 3])
if portfolio_file is not None:
with col1:
team_split_var1 = st.radio("Do you want to isolate lineups with specific main stacks?", ('Full Portfolio', 'Specific Stacks'))
if team_split_var1 == 'Specific Stacks':
team_var1 = st.multiselect('Which main stacks would you like to include in the Portfolio?', options = split_portfolio['Main_Stack'].unique())
elif team_split_var1 == 'Full Portfolio':
team_var1 = split_portfolio.Main_Stack.values.tolist()
with col2:
player_split_var1 = st.radio("Do you want to isolate lineups with specific players?", ('Full Players', 'Specific Players'))
if player_split_var1 == 'Specific Players':
find_var1 = st.multiselect('Which players must be included in the lineups?', options = static_exposure['Player'].unique())
elif player_split_var1 == 'Full Players':
find_var1 = static_exposure.Player.values.tolist()
split_portfolio = split_portfolio[split_portfolio['Main_Stack'].isin(team_var1)]
if player_split_var1 == 'Specific Players':
split_portfolio = split_portfolio[np.equal.outer(split_portfolio.to_numpy(copy=False), find_var1).any(axis=1).all(axis=1)]
elif player_split_var1 == 'Full Players':
split_portfolio = split_portfolio
for player_cols in split_portfolio.iloc[:, :9]:
exposure_col_raw = split_portfolio[player_cols].value_counts()
exposure_col = exposure_col_raw.to_frame()
exposure_col.reset_index(inplace=True)
exposure_col.columns = ['Player', 'count']
overall_exposure = pd.concat([overall_exposure, exposure_col], ignore_index=True)
overall_exposure['Exposure'] = overall_exposure['count'] / len(split_portfolio)
overall_exposure = overall_exposure.groupby('Player').sum()
overall_exposure.reset_index(inplace=True)
overall_exposure = overall_exposure[['Player', 'Exposure']]
overall_exposure = overall_exposure.set_index('Player')
overall_exposure = overall_exposure.sort_values(by='Exposure', ascending=False)
overall_exposure['Exposure'] = overall_exposure['Exposure'].astype(float).map(lambda n: '{:.2%}'.format(n))
with st.container():
col1, col2 = st.columns([1, 6])
with col1:
if portfolio_file is not None:
st.header('Exposure View')
st.dataframe(overall_exposure)
with col2:
if portfolio_file is not None:
st.header('Portfolio View')
split_portfolio = split_portfolio.reset_index()
split_portfolio['Lineup'] = split_portfolio['index'] + 1
display_portfolio = split_portfolio[['Lineup', 'QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'Salary', 'Main_Stack', 'Main_Stack_Size', 'Projection', 'Ownership']]
display_portfolio = display_portfolio.set_index('Lineup')
st.dataframe(display_portfolio.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').background_gradient(cmap='RdYlGn_r', subset=['Ownership']).format(precision=2))
del split_portfolio
del exposure_col_raw
del exposure_col
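# Tab 2 (Contest Sim): slate / site / portfolio / contest-size / field-sharpness
# controls, followed by the contest simulation itself.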
with tab2:
col1, col2 = st.columns([1, 7])
with col1:
st.info(t_stamp)
if st.button("Load/Reset Data", key='reset1'):
st.cache_data.clear()
dk_roo_raw = load_dk_player_projections()
fd_roo_raw = load_fd_player_projections()
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
site_slates = set_slate_teams()
dkid_dict, fdid_dict = set_export_ids()
slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Thurs-Mon Slate', 'User'))
site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'))
if site_var1 == 'Draftkings':
if slate_var1 == 'User':
raw_baselines = proj_dataframe[['Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', 'Own']]
elif slate_var1 != 'User':
raw_baselines = dk_roo_raw[dk_roo_raw['slate'] == str(slate_var1)]
raw_baselines = raw_baselines[raw_baselines['version'] == 'overall']
elif site_var1 == 'Fanduel':
if slate_var1 == 'User':
raw_baselines = proj_dataframe
elif slate_var1 != 'User':
raw_baselines = fd_roo_raw[fd_roo_raw['slate'] == str(slate_var1)]
raw_baselines = raw_baselines[raw_baselines['version'] == 'overall']
st.info("If you are uploading a portfolio, note that there is an adjustments to projections and deviation mapping to prevent 'Projection Bias' and create a fair simulation")
insert_port1 = st.selectbox("Are you uploading a portfolio?", ('No', 'Yes'), key='insert_port1')
if insert_port1 == 'Yes':
insert_port = 1
elif insert_port1 == 'No':
insert_port = 0
contest_var1 = st.selectbox("What contest size are you simulating?", ('Small', 'Medium', 'Large'))
if contest_var1 == 'Small':
Contest_Size = 1000
elif contest_var1 == 'Medium':
Contest_Size = 5000
elif contest_var1 == 'Large':
Contest_Size = 10000
linenum_var1 = 2500
strength_var1 = st.selectbox("How sharp is the field in the contest?", ('Not Very', 'Average', 'Very'))
if strength_var1 == 'Not Very':
sharp_split = .33
Strength_var = .50
scaling_var = 5
elif strength_var1 == 'Average':
sharp_split = .50
Strength_var = .25
scaling_var = 10
elif strength_var1 == 'Very':
sharp_split = .75
Strength_var = .01
scaling_var = 15
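# Initialize empty result frames in session state so simulation output persists across reruns.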
if 'Sim_Winner_Frame' not in st.session_state:
st.session_state.Sim_Winner_Frame = pd.DataFrame(columns=['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own', 'Fantasy', 'GPP_Proj'])
if 'Sim_Winner_Export' not in st.session_state:
st.session_state.Sim_Winner_Export = pd.DataFrame(columns=['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST', 'User/Field', 'Salary', 'Projection', 'Own', 'Fantasy', 'GPP_Proj'])
with col2:
with st.container():
if st.button("Simulate Contest"):
try:
del dst_freq
del flex_freq
del te_freq
del wr_freq
del rb_freq
del qb_freq
del player_freq
del Sim_Winner_Export
del Sim_Winner_Frame
except:
pass
with st.container():
st.write('Contest Simulation Starting')
seed_depth1 = 10
Total_Runs = 1000000
if Contest_Size <= 1000:
strength_grow = .01
elif Contest_Size > 1000 and Contest_Size <= 2500:
strength_grow = .025
elif Contest_Size > 2500 and Contest_Size <= 5000:
strength_grow = .05
elif Contest_Size > 5000 and Contest_Size <= 20000:
strength_grow = .075
elif Contest_Size > 20000:
strength_grow = .1
field_growth = 100 * strength_grow
Sort_function = 'Median'
if Sort_function == 'Median':
Sim_function = 'Projection'
elif Sort_function == 'Own':
Sim_function = 'Own'
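# Ownership adjustment: boost above-average ownership (larger multipliers for QBs and
# for smaller contests), cap at 75%, then rescale so total ownership sums to 900
# (nine roster spots).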
if slate_var1 == 'User':
OwnFrame = proj_dataframe
if contest_var1 == 'Small':
OwnFrame['Own%'] = np.where((OwnFrame['Position'] == 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (10 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean(), OwnFrame['Own'])
OwnFrame['Own%'] = np.where((OwnFrame['Position'] != 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (5 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean(), OwnFrame['Own%'])
OwnFrame['Own%'] = np.where(OwnFrame['Own%'] > 75, 75, OwnFrame['Own%'])
OwnFrame['Own'] = OwnFrame['Own%'] * (900 / OwnFrame['Own%'].sum())
if contest_var1 == 'Medium':
OwnFrame['Own%'] = np.where((OwnFrame['Position'] == 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (6 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean(), OwnFrame['Own'])
OwnFrame['Own%'] = np.where((OwnFrame['Position'] != 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (3 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean(), OwnFrame['Own%'])
OwnFrame['Own%'] = np.where(OwnFrame['Own%'] > 75, 75, OwnFrame['Own%'])
OwnFrame['Own'] = OwnFrame['Own%'] * (900 / OwnFrame['Own%'].sum())
if contest_var1 == 'Large':
OwnFrame['Own%'] = np.where((OwnFrame['Position'] == 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (3 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean(), OwnFrame['Own'])
OwnFrame['Own%'] = np.where((OwnFrame['Position'] != 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (1.5 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean(), OwnFrame['Own%'])
OwnFrame['Own%'] = np.where(OwnFrame['Own%'] > 75, 75, OwnFrame['Own%'])
OwnFrame['Own'] = OwnFrame['Own%'] * (900 / OwnFrame['Own%'].sum())
Overall_Proj = OwnFrame[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
del OwnFrame
elif slate_var1 != 'User':
initial_proj = raw_baselines
drop_frame = initial_proj.drop_duplicates(subset = 'Player',keep = 'first')
OwnFrame = drop_frame[['Player', 'Team', 'Position', 'Median', 'Own', 'Floor', 'Ceiling', 'Salary']]
if contest_var1 == 'Small':
OwnFrame['Own%'] = np.where((OwnFrame['Position'] == 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (10 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean(), OwnFrame['Own'])
OwnFrame['Own%'] = np.where((OwnFrame['Position'] != 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (5 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean(), OwnFrame['Own%'])
OwnFrame['Own%'] = np.where(OwnFrame['Own%'] > 75, 75, OwnFrame['Own%'])
OwnFrame['Own'] = OwnFrame['Own%'] * (900 / OwnFrame['Own%'].sum())
if contest_var1 == 'Medium':
OwnFrame['Own%'] = np.where((OwnFrame['Position'] == 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (6 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean(), OwnFrame['Own'])
OwnFrame['Own%'] = np.where((OwnFrame['Position'] != 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (3 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean(), OwnFrame['Own%'])
OwnFrame['Own%'] = np.where(OwnFrame['Own%'] > 75, 75, OwnFrame['Own%'])
OwnFrame['Own'] = OwnFrame['Own%'] * (900 / OwnFrame['Own%'].sum())
if contest_var1 == 'Large':
OwnFrame['Own%'] = np.where((OwnFrame['Position'] == 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (3 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] == 'QB', 'Own'].mean(), OwnFrame['Own'])
OwnFrame['Own%'] = np.where((OwnFrame['Position'] != 'QB') & (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean() >= 0), OwnFrame['Own'] * (1.5 * (OwnFrame['Own'] - OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean())/100) + OwnFrame.loc[OwnFrame['Position'] != 'QB', 'Own'].mean(), OwnFrame['Own%'])
OwnFrame['Own%'] = np.where(OwnFrame['Own%'] > 75, 75, OwnFrame['Own%'])
OwnFrame['Own'] = OwnFrame['Own%'] * (900 / OwnFrame['Own%'].sum())
Overall_Proj = OwnFrame[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
del initial_proj
del drop_frame
del OwnFrame
if insert_port == 1:
UserPortfolio = portfolio_dataframe[['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST']]
elif insert_port == 0:
UserPortfolio = pd.DataFrame(columns = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST'])
Overall_Proj.replace('', np.nan, inplace=True)
Overall_Proj = Overall_Proj.dropna(subset=['Median'])
Overall_Proj = Overall_Proj.assign(Value=lambda x: (x.Median / (x.Salary / 1000)))
Overall_Proj['Sort_var'] = (Overall_Proj['Median'].rank(ascending=False) + Overall_Proj['Value'].rank(ascending=False)) / 2
Overall_Proj = Overall_Proj.sort_values(by='Sort_var', ascending=False)
Overall_Proj['Own'] = np.where((Overall_Proj['Median'] > 0) & (Overall_Proj['Own'] == 0), 1, Overall_Proj['Own'])
Overall_Proj = Overall_Proj.loc[Overall_Proj['Own'] > 0]
Overall_Proj['Floor'] = np.where(Overall_Proj['Position'] == 'QB', Overall_Proj['Median'] * .5, Overall_Proj['Median'] * .25)
Overall_Proj['Ceiling'] = np.where(Overall_Proj['Position'] == 'WR', Overall_Proj['Median'] + Overall_Proj['Median'], Overall_Proj['Median'] + Overall_Proj['Floor'])
Overall_Proj['STDev'] = Overall_Proj['Median'] / 4
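# Index the teams on the slate and derive FieldStrength from slate size and contest size.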
Teams_used = Overall_Proj['Team'].drop_duplicates().reset_index(drop=True)
Teams_used = Teams_used.reset_index()
Teams_used['team_item'] = Teams_used['index'] + 1
Teams_used = Teams_used.drop(columns=['index'])
Teams_used_dictraw = Teams_used.drop(columns=['team_item'])
Teams_used_dict = Teams_used_dictraw.to_dict()
del Teams_used_dictraw
team_list = Teams_used['Team'].to_list()
item_list = Teams_used['team_item'].to_list()
FieldStrength_raw = Strength_var + ((30 - len(Teams_used)) * .01)
FieldStrength = FieldStrength_raw - (FieldStrength_raw * (20000 / Contest_Size))
del FieldStrength_raw
if FieldStrength < 0:
FieldStrength = Strength_var
field_split = Strength_var
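# Swap team names for their integer codes, then build per-position pools (QB, DST, RB,
# WR, TE) sorted by projection/ownership along with the index-to-player dicts used for draws.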
for checkVar in range(len(team_list)):
Overall_Proj['Team'] = Overall_Proj['Team'].replace(team_list, item_list)
qbs_raw = Overall_Proj[Overall_Proj.Position == 'QB']
qbs_raw = qbs_raw.dropna(subset=['Median']).reset_index(drop=True)
qbs_raw = qbs_raw.sort_values(by=['Median'], ascending=False)
qbs = qbs_raw.head(round(len(qbs_raw)))
qbs = qbs.assign(Var = range(0,len(qbs)))
qb_dict = pd.Series(qbs.Player.values, index=qbs.Var).to_dict()
defs_raw = Overall_Proj[Overall_Proj.Position.str.contains("D")]
defs_raw = defs_raw.dropna(subset=['Median']).reset_index(drop=True)
defs_raw = defs_raw.sort_values(by=['Own', 'Value'], ascending=False)
defs = defs_raw.head(round(len(defs_raw)))
defs = defs.assign(Var = range(0,len(defs)))
def_dict = pd.Series(defs.Player.values, index=defs.Var).to_dict()
rbs_raw = Overall_Proj[Overall_Proj.Position == 'RB']
rbs_raw = rbs_raw.dropna(subset=['Median']).reset_index(drop=True)
rbs_raw = rbs_raw.sort_values(by=['Own', 'Value'], ascending=False)
wrs_raw = Overall_Proj[Overall_Proj.Position == 'WR']
wrs_raw = wrs_raw.dropna(subset=['Median']).reset_index(drop=True)
wrs_raw = wrs_raw.sort_values(by=['Own', 'Median'], ascending=False)
tes_raw = Overall_Proj[Overall_Proj.Position == 'TE']
tes_raw = tes_raw.dropna(subset=['Median']).reset_index(drop=True)
tes_raw = tes_raw.sort_values(by=['Own', 'Value'], ascending=False)
pos_players = pd.concat([rbs_raw, wrs_raw, tes_raw])
pos_players = pos_players.dropna(subset=['Median']).reset_index(drop=True)
del qbs_raw
del defs_raw
del rbs_raw
del wrs_raw
del tes_raw
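# Uploaded portfolio handling: strip the "(id" suffix from player names, build a player
# frequency table, and reduce Median/Floor/Ceiling/STDev by 10% (the 'Projection Bias'
# adjustment mentioned in the uploader note).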
if insert_port == 1:
try:
# Initialize an empty DataFrame for Raw Portfolio
Raw_Portfolio = pd.DataFrame()
# Loop through each position and split the data accordingly
positions = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST']
for pos in positions:
temp_df = UserPortfolio[pos].str.split("(", n=1, expand=True)
temp_df.columns = [pos, 'Drop']
Raw_Portfolio = pd.concat([Raw_Portfolio, temp_df], axis=1)
# Select only necessary columns and strip white spaces
CleanPortfolio = Raw_Portfolio[positions].apply(lambda x: x.str.strip())
CleanPortfolio.reset_index(inplace=True)
CleanPortfolio['User/Field'] = CleanPortfolio['index'] + 1
CleanPortfolio.drop(columns=['index'], inplace=True)
CleanPortfolio.replace('', np.nan, inplace=True)
CleanPortfolio.dropna(subset=['QB'], inplace=True)
# Create frequency table for players
cleaport_players = pd.DataFrame(
np.column_stack(np.unique(CleanPortfolio.iloc[:, 0:9].values, return_counts=True)),
columns=['Player', 'Freq']
).sort_values('Freq', ascending=False).reset_index(drop=True)
cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
# Merge and update nerf_frame
nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
for col in ['Median', 'Floor', 'Ceiling', 'STDev']:
nerf_frame[col] *= 0.90
del Raw_Portfolio
# Fall back to using the uploaded portfolio as-is if the "(ID)" split fails
except Exception:
CleanPortfolio = UserPortfolio.reset_index()
CleanPortfolio['User/Field'] = CleanPortfolio['index'] + 1
CleanPortfolio.drop(columns=['index'], inplace=True)
# Replace empty strings and drop rows with NaN in 'QB' column
CleanPortfolio.replace('', np.nan, inplace=True)
CleanPortfolio.dropna(subset=['QB'], inplace=True)
# Create frequency table for players
cleaport_players = pd.DataFrame(
np.column_stack(np.unique(CleanPortfolio.iloc[:, 0:9].values, return_counts=True)),
columns=['Player', 'Freq']
).sort_values('Freq', ascending=False).reset_index(drop=True)
cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
# Merge and update nerf_frame
nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
for col in ['Median', 'Floor', 'Ceiling', 'STDev']:
nerf_frame[col] *= 0.90
elif insert_port == 0:
CleanPortfolio = UserPortfolio
cleaport_players = pd.DataFrame(np.column_stack(np.unique(CleanPortfolio.iloc[:,0:9].values, return_counts=True)),
columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
nerf_frame = Overall_Proj
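# Player-keyed lookup dictionaries: maps_dict serves the full projection pool,
# up_dict serves the (nerfed) uploaded-portfolio players.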
ref_dict = {
'pos':['RB', 'WR', 'TE', 'FLEX'],
'pos_dfs':['RB_Table', 'WR_Table', 'TE_Table', 'FLEX_Table'],
'pos_dicts':['rb_dict', 'wr_dict', 'te_dict', 'flex_dict']
}
maps_dict = {
'Floor_map':dict(zip(Overall_Proj.Player,Overall_Proj.Floor)),
'Projection_map':dict(zip(Overall_Proj.Player,Overall_Proj.Median)),
'Ceiling_map':dict(zip(Overall_Proj.Player,Overall_Proj.Ceiling)),
'Salary_map':dict(zip(Overall_Proj.Player,Overall_Proj.Salary)),
'Pos_map':dict(zip(Overall_Proj.Player,Overall_Proj.Position)),
'Own_map':dict(zip(Overall_Proj.Player,Overall_Proj.Own)),
'Team_map':dict(zip(Overall_Proj.Player,Overall_Proj.Team)),
'STDev_map':dict(zip(Overall_Proj.Player,Overall_Proj.STDev)),
'team_check_map':dict(zip(Overall_Proj.Player,Overall_Proj.Team))
}
up_dict = {
'Floor_map':dict(zip(cleaport_players.Player,nerf_frame.Floor)),
'Projection_map':dict(zip(cleaport_players.Player,nerf_frame.Median)),
'Ceiling_map':dict(zip(cleaport_players.Player,nerf_frame.Ceiling)),
'Salary_map':dict(zip(cleaport_players.Player,nerf_frame.Salary)),
'Pos_map':dict(zip(cleaport_players.Player,nerf_frame.Position)),
'Own_map':dict(zip(cleaport_players.Player,nerf_frame.Own)),
'Team_map':dict(zip(cleaport_players.Player,nerf_frame.Team)),
'STDev_map':dict(zip(cleaport_players.Player,nerf_frame.STDev)),
'team_check_map':dict(zip(cleaport_players.Player,nerf_frame.Team))
}
del cleaport_players
del Overall_Proj
del nerf_frame
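# Build the seed lineup pool that stands in for the contest field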
st.write('Seed frame creation')
FinalPortfolio, maps_dict = run_seed_frame(seed_depth1, Strength_var, strength_grow, Teams_used, Total_Runs)
Sim_size = linenum_var1
SimVar = 1
Sim_Winners = []
fp_array = FinalPortfolio.values
if insert_port == 1:
up_array = CleanPortfolio.values
# Pre-vectorize functions
vec_projection_map = np.vectorize(maps_dict['Projection_map'].__getitem__)
vec_stdev_map = np.vectorize(maps_dict['STDev_map'].__getitem__)
if insert_port == 1:
vec_up_projection_map = np.vectorize(up_dict['Projection_map'].__getitem__)
vec_up_stdev_map = np.vectorize(up_dict['STDev_map'].__getitem__)
st.write('Simulating contest on frames')
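# Each iteration samples Contest_Size lineups from the seed pool (reserving slots for
# uploaded lineups when present), draws a normal fantasy score per player from the
# Projection/STDev maps, sums to a lineup total, and keeps the top-scoring lineup.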
while SimVar <= Sim_size:
if insert_port == 1:
fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size-len(CleanPortfolio))]
elif insert_port == 0:
fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size)]
sample_arrays1 = np.c_[
fp_random,
np.sum(np.random.normal(
loc=vec_projection_map(fp_random[:, :-5]),
scale=vec_stdev_map(fp_random[:, :-5])),
axis=1)
]
if insert_port == 1:
sample_arrays2 = np.c_[
up_array,
np.sum(np.random.normal(
loc=vec_up_projection_map(up_array[:, :-5]),
scale=vec_up_stdev_map(up_array[:, :-5])),
axis=1)
]
sample_arrays = np.vstack((sample_arrays1, sample_arrays2))
else:
sample_arrays = sample_arrays1
final_array = sample_arrays[sample_arrays[:, 10].argsort()[::-1]]
best_lineup = final_array[final_array[:, -1].argsort(kind='stable')[::-1][:1]]
Sim_Winners.append(best_lineup)
SimVar += 1
# del sample_arrays
# del sample_arrays1
# del sample_arrays2
# del final_array
# del best_lineup
st.write('Contest simulation complete')
# Initial setup
Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners), columns=FinalPortfolio.columns.tolist() + ['Fantasy'])
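# GPP_Proj: simple 50/50 blend of the pre-sim Projection and the simulated Fantasy score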
Sim_Winner_Frame['GPP_Proj'] = (Sim_Winner_Frame['Projection'] + Sim_Winner_Frame['Fantasy']) / 2
# Type Casting
type_cast_dict = {'Salary': int, 'Projection': np.float16, 'Fantasy': np.float16, 'GPP_Proj': np.float16}
Sim_Winner_Frame = Sim_Winner_Frame.astype(type_cast_dict)
# Sorting
st.session_state.Sim_Winner_Frame = Sim_Winner_Frame.sort_values(by='GPP_Proj', ascending=False)
# Data Copying
st.session_state.Sim_Winner_Export = Sim_Winner_Frame.copy()
# Conditional Replacement
columns_to_replace = ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX', 'DST']
if site_var1 == 'Draftkings':
replace_dict = dkid_dict
elif site_var1 == 'Fanduel':
replace_dict = fdid_dict
for col in columns_to_replace:
st.session_state.Sim_Winner_Export[col].replace(replace_dict, inplace=True)
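# Exposure tables: how often each player appears across simulated winners, with
# Edge = Exposure - Proj Own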
player_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,0:9].values, return_counts=True)),
columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
player_freq['Freq'] = player_freq['Freq'].astype(int)
player_freq['Position'] = player_freq['Player'].map(maps_dict['Pos_map'])
player_freq['Salary'] = player_freq['Player'].map(maps_dict['Salary_map'])
player_freq['Proj Own'] = player_freq['Player'].map(maps_dict['Own_map']) / 100
player_freq['Exposure'] = player_freq['Freq']/(Sim_size)
player_freq['Edge'] = player_freq['Exposure'] - player_freq['Proj Own']
player_freq['Team'] = player_freq['Player'].map(maps_dict['Team_map'])
# Decode integer team codes back to team names for display
player_freq['Team'] = player_freq['Team'].replace(item_list, team_list)
player_freq = player_freq[['Player', 'Position', 'Team', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
qb_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,0:1].values, return_counts=True)),
columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
qb_freq['Freq'] = qb_freq['Freq'].astype(int)
qb_freq['Position'] = qb_freq['Player'].map(maps_dict['Pos_map'])
qb_freq['Salary'] = qb_freq['Player'].map(maps_dict['Salary_map'])
qb_freq['Proj Own'] = qb_freq['Player'].map(maps_dict['Own_map']) / 100
qb_freq['Exposure'] = qb_freq['Freq']/(Sim_size)
qb_freq['Edge'] = qb_freq['Exposure'] - qb_freq['Proj Own']
qb_freq['Team'] = qb_freq['Player'].map(maps_dict['Team_map'])
qb_freq['Team'] = qb_freq['Team'].replace(item_list, team_list)
qb_freq = qb_freq[['Player', 'Team', 'Position', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
rb_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,[1, 2]].values, return_counts=True)),
columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
rb_freq['Freq'] = rb_freq['Freq'].astype(int)
rb_freq['Position'] = rb_freq['Player'].map(maps_dict['Pos_map'])
rb_freq['Salary'] = rb_freq['Player'].map(maps_dict['Salary_map'])
rb_freq['Proj Own'] = rb_freq['Player'].map(maps_dict['Own_map']) / 100
rb_freq['Exposure'] = rb_freq['Freq']/Sim_size
rb_freq['Edge'] = rb_freq['Exposure'] - rb_freq['Proj Own']
rb_freq['Team'] = rb_freq['Player'].map(maps_dict['Team_map'])
rb_freq['Team'] = rb_freq['Team'].replace(item_list, team_list)
rb_freq = rb_freq[['Player', 'Team', 'Position', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
wr_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,[3, 4, 5]].values, return_counts=True)),
columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
wr_freq['Freq'] = wr_freq['Freq'].astype(int)
wr_freq['Position'] = wr_freq['Player'].map(maps_dict['Pos_map'])
wr_freq['Salary'] = wr_freq['Player'].map(maps_dict['Salary_map'])
wr_freq['Proj Own'] = wr_freq['Player'].map(maps_dict['Own_map']) / 100
wr_freq['Exposure'] = wr_freq['Freq']/Sim_size
wr_freq['Edge'] = wr_freq['Exposure'] - wr_freq['Proj Own']
wr_freq['Team'] = wr_freq['Player'].map(maps_dict['Team_map'])
wr_freq['Team'] = wr_freq['Team'].replace(item_list, team_list)
wr_freq = wr_freq[['Player', 'Team', 'Position', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
te_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,[6]].values, return_counts=True)),
columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
te_freq['Freq'] = te_freq['Freq'].astype(int)
te_freq['Position'] = te_freq['Player'].map(maps_dict['Pos_map'])
te_freq['Salary'] = te_freq['Player'].map(maps_dict['Salary_map'])
te_freq['Proj Own'] = te_freq['Player'].map(maps_dict['Own_map']) / 100
te_freq['Exposure'] = te_freq['Freq']/Sim_size
te_freq['Edge'] = te_freq['Exposure'] - te_freq['Proj Own']
te_freq['Team'] = te_freq['Player'].map(maps_dict['Team_map'])
te_freq['Team'] = te_freq['Team'].replace(item_list, team_list)
te_freq = te_freq[['Player', 'Team', 'Position', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
flex_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,[7]].values, return_counts=True)),
columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
flex_freq['Freq'] = flex_freq['Freq'].astype(int)
flex_freq['Position'] = flex_freq['Player'].map(maps_dict['Pos_map'])
flex_freq['Salary'] = flex_freq['Player'].map(maps_dict['Salary_map'])
flex_freq['Proj Own'] = flex_freq['Player'].map(maps_dict['Own_map']) / 100
flex_freq['Exposure'] = flex_freq['Freq']/Sim_size
flex_freq['Edge'] = flex_freq['Exposure'] - flex_freq['Proj Own']
flex_freq['Team'] = flex_freq['Player'].map(maps_dict['Team_map'])
flex_freq['Team'] = flex_freq['Team'].replace(item_list, team_list)
flex_freq = flex_freq[['Player', 'Team', 'Position', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
dst_freq = pd.DataFrame(np.column_stack(np.unique(Sim_Winner_Frame.iloc[:,8:9].values, return_counts=True)),
columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
dst_freq['Freq'] = dst_freq['Freq'].astype(int)
dst_freq['Position'] = dst_freq['Player'].map(maps_dict['Pos_map'])
dst_freq['Salary'] = dst_freq['Player'].map(maps_dict['Salary_map'])
dst_freq['Proj Own'] = dst_freq['Player'].map(maps_dict['Own_map']) / 100
dst_freq['Exposure'] = dst_freq['Freq']/Sim_size
dst_freq['Edge'] = dst_freq['Exposure'] - dst_freq['Proj Own']
dst_freq['Team'] = dst_freq['Player'].map(maps_dict['Team_map'])
dst_freq['Team'] = dst_freq['Team'].replace(item_list, team_list)
dst_freq = dst_freq[['Player', 'Team', 'Position', 'Salary', 'Proj Own', 'Exposure', 'Edge']]
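# Streamlit output: sim winners table with CSV export, plus per-position exposure tabs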
with st.container():
simulate_container = st.empty()
if "df" not in st.session_state:
st.session_state["df"] = None
st.dataframe(st.session_state.Sim_Winner_Frame.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').background_gradient(cmap='RdYlGn_r', subset=['Own']).format(precision=2), use_container_width = True)
st.download_button(
label="Export Tables",
data=convert_df_to_csv(st.session_state.Sim_Winner_Export),
file_name='NFL_consim_export.csv',
mime='text/csv',
)
with st.container():
freq_container = st.empty()
tab1, tab2, tab3, tab4, tab5, tab6, tab7 = st.tabs(['Overall Exposures', 'QB Exposures', 'RB Exposures', 'WR Exposures', 'TE Exposures', 'FLEX Exposures', 'DST Exposures'])
with tab1:
st.dataframe(player_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
st.download_button(
label="Export Exposures",
data=convert_df_to_csv(player_freq),
file_name='player_freq_export.csv',
mime='text/csv',
)
with tab2:
st.dataframe(qb_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
st.download_button(
label="Export Exposures",
data=convert_df_to_csv(qb_freq),
file_name='qb_freq_export.csv',
mime='text/csv',
)
with tab3:
st.dataframe(rb_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
st.download_button(
label="Export Exposures",
data=convert_df_to_csv(rb_freq),
file_name='rb_freq_export.csv',
mime='text/csv',
)
with tab4:
st.dataframe(wr_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
st.download_button(
label="Export Exposures",
data=convert_df_to_csv(wr_freq),
file_name='wr_freq_export.csv',
mime='text/csv',
)
with tab5:
st.dataframe(te_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
st.download_button(
label="Export Exposures",
data=convert_df_to_csv(te_freq),
file_name='te_freq_export.csv',
mime='text/csv',
)
with tab6:
st.dataframe(flex_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
st.download_button(
label="Export Exposures",
data=convert_df_to_csv(flex_freq),
file_name='flex_freq_export.csv',
mime='text/csv',
)
with tab7:
st.dataframe(dst_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
st.download_button(
label="Export Exposures",
data=convert_df_to_csv(dst_freq),
file_name='dst_freq_export.csv',
mime='text/csv',
)