James McCool
Enhance position management in Streamlit app by implementing position limits for DraftKings and FanDuel. Update styling for tabs and add functionality to validate player combinations based on eligible positions. Refactor position selection logic for improved user experience.
d09a5ca
import numpy as np
import pandas as pd
import streamlit as st
from itertools import combinations
import pymongo

st.set_page_config(layout="wide")
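
# Connect to the MongoDB database that stores the player projections; the URI is read from Streamlit secrets.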
def init_conn():
    uri = st.secrets['mongo_uri']
    client = pymongo.MongoClient(uri, retryWrites=True, serverSelectionTimeoutMS=500000)
    db = client["MLB_Database"]
    return db

db = init_conn()

game_format = {'Win Percentage': '{:.2%}', 'First Inning Lead Percentage': '{:.2%}',
               'Fifth Inning Lead Percentage': '{:.2%}', '8+ runs': '{:.2%}', 'DK LevX': '{:.2%}', 'FD LevX': '{:.2%}'}

team_roo_format = {'Top Score%': '{:.2%}', '0 Runs': '{:.2%}', '1 Run': '{:.2%}', '2 Runs': '{:.2%}', '3 Runs': '{:.2%}', '4 Runs': '{:.2%}',
                   '5 Runs': '{:.2%}', '6 Runs': '{:.2%}', '7 Runs': '{:.2%}', '8 Runs': '{:.2%}', '9 Runs': '{:.2%}', '10 Runs': '{:.2%}'}

wrong_acro = ['WSH', 'AZ', 'CHW']
right_acro = ['WAS', 'ARI', 'CWS']
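
# Custom CSS: gold tab styling with a highlighted active tab, matching gold select boxes, and a box-sizing override.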
st.markdown("""
    <style>
    /* Tab styling */
    .stTabs [data-baseweb="tab-list"] {
        gap: 8px;
        padding: 4px;
    }

    .stTabs [data-baseweb="tab"] {
        height: 50px;
        white-space: pre-wrap;
        background-color: #DAA520;
        color: white;
        border-radius: 10px;
        gap: 1px;
        padding: 10px 20px;
        font-weight: bold;
        transition: all 0.3s ease;
    }

    .stTabs [aria-selected="true"] {
        background-color: #DAA520;
        border: 3px solid #FFD700;
        color: white;
    }

    .stTabs [data-baseweb="tab"]:hover {
        background-color: #FFD700;
        cursor: pointer;
    }

    div[data-baseweb="select"] > div {
        background-color: #DAA520;
        color: white;
    }

    div {
        box-sizing: content-box !important;
    }
    </style>""", unsafe_allow_html=True)

# Load the player range-of-outcomes projections from MongoDB, keep the columns the app uses, and sort by ownership.
@st.cache_data  # assumption: cached so the Load/Reset button's st.cache_data.clear() below forces a fresh pull
def init_stat_load():
    collection = db["Player_Range_Of_Outcomes"]
    cursor = collection.find()
    raw_display = pd.DataFrame(list(cursor))
    raw_display = raw_display[['Player', 'Position', 'Team', 'Salary', 'Floor', 'Median', 'Ceiling', 'Own%', 'Site', 'Slate']]
    raw_display = raw_display.rename(columns={'Own%': 'Own'})
    initial_concat = raw_display.sort_values(by='Own', ascending=False)
    return initial_concat

def convert_df_to_csv(df):
    return df.to_csv().encode('utf-8')
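
# Initial data pull and page layout: controls in the narrow left column, results in the wide right column.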
proj_raw = init_stat_load()

col1, col2 = st.columns([1, 5])

with col1:
    with st.container():
        if st.button("Load/Reset Data", key='reset1'):
            st.cache_data.clear()
            proj_raw = init_stat_load()
            # Snapshot the keys first so we are not deleting from session_state while iterating over it
            for key in list(st.session_state.keys()):
                del st.session_state[key]
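
    # Left-column controls: site, slate, team scope, position filter, max stack salary, and stack size.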
    site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'), key='site_var1')
    slate_var1 = st.radio("What slate are you working with?", ('Main Slate', 'Secondary Slate'), key='slate_var1')

    if site_var1 == 'Draftkings':
        raw_baselines = proj_raw[proj_raw['Site'] == 'Draftkings']
        if slate_var1 == 'Main Slate':
            raw_baselines = raw_baselines[raw_baselines['Slate'] == 'main_slate']
        elif slate_var1 == 'Secondary Slate':
            raw_baselines = raw_baselines[raw_baselines['Slate'] == 'secondary_slate']
        raw_baselines = raw_baselines.sort_values(by='Own', ascending=False)
    elif site_var1 == 'Fanduel':
        raw_baselines = proj_raw[proj_raw['Site'] == 'Fanduel']
        if slate_var1 == 'Main Slate':
            raw_baselines = raw_baselines[raw_baselines['Slate'] == 'main_slate']
        elif slate_var1 == 'Secondary Slate':
            raw_baselines = raw_baselines[raw_baselines['Slate'] == 'secondary_slate']
        raw_baselines = raw_baselines.sort_values(by='Own', ascending=False)

    split_var2 = st.radio("Would you like to run stack analysis for the full slate or individual teams?", ('Full Slate Run', 'Specific Teams'), key='split_var2')
    if split_var2 == 'Specific Teams':
        team_var2 = st.multiselect('Which teams would you like to include in the analysis?', options = raw_baselines['Team'].unique(), key='team_var2')
    elif split_var2 == 'Full Slate Run':
        team_var2 = raw_baselines.Team.unique().tolist()

    pos_split2 = st.radio("Are you viewing all positions, specific groups, or specific positions?", ('All Positions', 'Specific Positions'), key='pos_split2')
    if pos_split2 == 'Specific Positions':
        pos_var2 = st.multiselect('What Positions would you like to view?', options = ['C', '1B', '2B', '3B', 'SS', 'OF'])
    elif pos_split2 == 'All Positions':
        pos_var2 = 'All'

    if site_var1 == 'Draftkings':
        max_sal2 = st.number_input('Max Salary', min_value = 5000, max_value = 50000, value = 35000, step = 100, key='max_sal2')
    elif site_var1 == 'Fanduel':
        max_sal2 = st.number_input('Max Salary', min_value = 5000, max_value = 35000, value = 25000, step = 100, key='max_sal2')

    size_var2 = st.selectbox('What size of stacks are you analyzing?', options = ['3-man', '4-man', '5-man'])
    if size_var2 == '3-man':
        stack_size = 3
    if size_var2 == '4-man':
        stack_size = 4
    if size_var2 == '5-man':
        stack_size = 5
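
    # Per-player lookups used later to attach team, projection, ownership, and salary to each stack.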
    team_dict = dict(zip(raw_baselines.Player, raw_baselines.Team))
    proj_dict = dict(zip(raw_baselines.Player, raw_baselines.Median))
    own_dict = dict(zip(raw_baselines.Player, raw_baselines.Own))
    cost_dict = dict(zip(raw_baselines.Player, raw_baselines.Salary))
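
# Right column: build candidate stacks, validate them against the position limits, trim by ownership, and display the results.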
with col2:
    if site_var1 == 'Draftkings':
        position_limits = {
            'C': 1,
            '1B': 1,
            '2B': 1,
            '3B': 1,
            'SS': 1,
            'OF': 3,
            # Add more as needed
        }
        max_salary = 50000
        max_players = 10
    else:
        # FanDuel lists catchers and first basemen in a single C/1B slot and adds a UTIL spot
        position_limits = {
            'C_1B': 1,
            '2B': 1,
            '3B': 1,
            'SS': 1,
            'OF': 3,
            'UTIL': 1,
            # Add more as needed
        }

    stack_hold_container = st.empty()
    comb_list = []

    if pos_split2 == 'All Positions':
        raw_baselines = raw_baselines
    elif pos_split2 != 'All Positions':
        raw_baselines = raw_baselines[raw_baselines['Position'].str.contains('|'.join(pos_var2))]

    # Create a position dictionary mapping players to their eligible positions
    pos_dict = dict(zip(raw_baselines.Player, raw_baselines.Position))

    def is_valid_combination(combo):
        # Count positions in this combination; every eligible position of every player adds to
        # its count, so multi-position players are counted at each spot they are listed for
        position_counts = {pos: 0 for pos in position_limits.keys()}

        # For each player in the combination
        for player in combo:
            # Get their eligible positions
            player_positions = pos_dict[player].split('/')
            # For each position they can play
            for pos in player_positions:
                # Handle special cases for FanDuel
                if site_var1 == 'Fanduel':
                    if pos in ['C', '1B']:
                        # FanDuel folds C and 1B into a single roster slot
                        position_counts['C_1B'] += 1
                    elif pos == 'UTIL':
                        # UTIL can be filled by any position
                        for p in position_counts:
                            position_counts[p] += 1
                    elif pos in position_counts:
                        # Remaining FanDuel positions (2B/3B/SS/OF) count against their own limits
                        position_counts[pos] += 1
                else:  # DraftKings
                    if pos in position_counts:
                        position_counts[pos] += 1

        # Check if any position exceeds its limit
        for pos, limit in position_limits.items():
            if position_counts[pos] > limit:
                return False
        return True
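
    # Example (DraftKings, 3-man stacks): OF + OF + SS passes (OF count 2 of 3, SS 1 of 1), while
    # SS + 2B/SS + OF is rejected because both middle infielders add to the SS count (2 > 1) even
    # though one of them could slot at 2B; the check is a conservative eligibility-count filter,
    # not a full roster assignment.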

    # Build per-team hitter pools and keep only the stacks that satisfy the position limits
    comb_list = []
    for cur_team in team_var2:
        working_baselines = raw_baselines
        working_baselines = working_baselines[working_baselines['Team'] == cur_team]
        working_baselines = working_baselines[working_baselines['Position'] != 'SP']
        working_baselines = working_baselines[working_baselines['Position'] != 'P']
        order_list = working_baselines['Player']

        comb = combinations(order_list, stack_size)

        # Only add combinations that satisfy position limits
        for i in list(comb):
            if is_valid_combination(i):
                comb_list.append(i)
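
    # Assemble the surviving stacks into a DataFrame, tag each with its team, and total projection, salary, and ownership.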
    comb_DF = pd.DataFrame(comb_list)

    if stack_size == 3:
        comb_DF['Team'] = comb_DF[0].map(team_dict)
        comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
                               comb_DF[1].map(proj_dict),
                               comb_DF[2].map(proj_dict)])
        comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
                                 comb_DF[1].map(cost_dict),
                                 comb_DF[2].map(cost_dict)])
        comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
                               comb_DF[1].map(own_dict),
                               comb_DF[2].map(own_dict)])
    elif stack_size == 4:
        comb_DF['Team'] = comb_DF[0].map(team_dict)
        comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
                               comb_DF[1].map(proj_dict),
                               comb_DF[2].map(proj_dict),
                               comb_DF[3].map(proj_dict)])
        comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
                                 comb_DF[1].map(cost_dict),
                                 comb_DF[2].map(cost_dict),
                                 comb_DF[3].map(cost_dict)])
        comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
                               comb_DF[1].map(own_dict),
                               comb_DF[2].map(own_dict),
                               comb_DF[3].map(own_dict)])
    elif stack_size == 5:
        comb_DF['Team'] = comb_DF[0].map(team_dict)
        comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
                               comb_DF[1].map(proj_dict),
                               comb_DF[2].map(proj_dict),
                               comb_DF[3].map(proj_dict),
                               comb_DF[4].map(proj_dict)])
        comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
                                 comb_DF[1].map(cost_dict),
                                 comb_DF[2].map(cost_dict),
                                 comb_DF[3].map(cost_dict),
                                 comb_DF[4].map(cost_dict)])
        comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
                               comb_DF[1].map(own_dict),
                               comb_DF[2].map(own_dict),
                               comb_DF[3].map(own_dict),
                               comb_DF[4].map(own_dict)])

    comb_DF = comb_DF.sort_values(by='Proj', ascending=False)
    comb_DF = comb_DF.loc[comb_DF['Salary'] <= max_sal2]
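
    # Ownership trim: walking down the projection-sorted table, keep a stack only if its combined ownership
    # is no higher than that of every stack projected above it. The hard-coded iat column offsets differ by
    # stack size because the player columns come first: Proj/Own% sit at columns 4/6 for 3-man stacks,
    # 5/7 for 4-man, and 6/8 for 5-man.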
    cut_var = 0

    if stack_size == 3:
        while cut_var <= int(len(comb_DF)):
            try:
                if int(cut_var) == 0:
                    cur_proj = float(comb_DF.iat[cut_var,4])
                    cur_own = float(comb_DF.iat[cut_var,6])
                elif int(cut_var) >= 1:
                    check_own = float(comb_DF.iat[cut_var,6])
                    if check_own > cur_own:
                        comb_DF = comb_DF.drop([cut_var])
                        cur_own = cur_own
                        cut_var = cut_var - 1
                        comb_DF = comb_DF.reset_index()
                        comb_DF = comb_DF.drop(['index'], axis=1)
                    elif check_own <= cur_own:
                        cur_own = float(comb_DF.iat[cut_var,6])
                        cut_var = cut_var
                cut_var += 1
            except:
                cut_var += 1
    elif stack_size == 4:
        while cut_var <= int(len(comb_DF)):
            try:
                if int(cut_var) == 0:
                    cur_proj = float(comb_DF.iat[cut_var,5])
                    cur_own = float(comb_DF.iat[cut_var,7])
                elif int(cut_var) >= 1:
                    check_own = float(comb_DF.iat[cut_var,7])
                    if check_own > cur_own:
                        comb_DF = comb_DF.drop([cut_var])
                        cur_own = cur_own
                        cut_var = cut_var - 1
                        comb_DF = comb_DF.reset_index()
                        comb_DF = comb_DF.drop(['index'], axis=1)
                    elif check_own <= cur_own:
                        cur_own = float(comb_DF.iat[cut_var,7])
                        cut_var = cut_var
                cut_var += 1
            except:
                cut_var += 1
    elif stack_size == 5:
        while cut_var <= int(len(comb_DF)):
            try:
                if int(cut_var) == 0:
                    cur_proj = float(comb_DF.iat[cut_var,6])
                    cur_own = float(comb_DF.iat[cut_var,8])
                elif int(cut_var) >= 1:
                    check_own = float(comb_DF.iat[cut_var,8])
                    if check_own > cur_own:
                        comb_DF = comb_DF.drop([cut_var])
                        cur_own = cur_own
                        cut_var = cut_var - 1
                        comb_DF = comb_DF.reset_index()
                        comb_DF = comb_DF.drop(['index'], axis=1)
                    elif check_own <= cur_own:
                        cur_own = float(comb_DF.iat[cut_var,8])
                        cut_var = cut_var
                cut_var += 1
            except:
                cut_var += 1
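
    # Render the trimmed stack table with a color gradient and offer a CSV export.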
    with stack_hold_container:
        stack_hold_container = st.empty()
        st.dataframe(comb_DF.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width = True)
        st.download_button(
            label="Export Tables",
            data=convert_df_to_csv(comb_DF),
            file_name='MLB_Stack_Options_export.csv',
            mime='text/csv',
        )