import pulp
import numpy as np
import pandas as pd
import streamlit as st
import gspread
from itertools import combinations

# Google Sheets / Drive scopes and the service-account credentials used by gspread
scope = ['https://www.googleapis.com/auth/spreadsheets', "https://www.googleapis.com/auth/drive"]

credentials = {
    "type": "service_account",
    "project_id": "sheets-api-connect-378620",
    "private_key_id": "1005124050c80d085e2c5b344345715978dd9cc9",
    "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCtKa01beXwc88R\nnPZVQTNPVQuBnbwoOfc66gW3547ja/UEyIGAF112dt/VqHprRafkKGmlg55jqJNt\na4zceLKV+wTm7vBu7lDISTJfGzCf2TrxQYNqwMKE2LOjI69dBM8u4Dcb4k0wcp9v\ntW1ZzLVVuwTvmrg7JBHjiSaB+x5wxm/r3FOiJDXdlAgFlytzqgcyeZMJVKKBQHyJ\njEGg/1720A0numuOCt71w/2G0bDmijuj1e6tH32MwRWcvRNZ19K9ssyDz2S9p68s\nYDhIxX69OWxwScTIHLY6J2t8txf/XMivL/636fPlDADvBEVTdlT606n8CcKUVQeq\npUVdG+lfAgMBAAECggEAP38SUA7B69eTfRpo658ycOs3Amr0JW4H/bb1rNeAul0K\nZhwd/HnU4E07y81xQmey5kN5ZeNrD5EvqkZvSyMJHV0EEahZStwhjCfnDB/cxyix\nZ+kFhv4y9eK+kFpUAhBy5nX6T0O+2T6WvzAwbmbVsZ+X8kJyPuF9m8ldcPlD0sce\ntj8NwVq1ys52eosqs7zi2vjt+eMcaY393l4ls+vNq8Yf27cfyFw45W45CH/97/Nu\n5AmuzlCOAfFF+z4OC5g4rei4E/Qgpxa7/uom+BVfv9G0DIGW/tU6Sne0+37uoGKt\nW6DzhgtebUtoYkG7ZJ05BTXGp2lwgVcNRoPwnKJDxQKBgQDT5wYPUBDW+FHbvZSp\nd1m1UQuXyerqOTA9smFaM8sr/UraeH85DJPEIEk8qsntMBVMhvD3Pw8uIUeFNMYj\naLmZFObsL+WctepXrVo5NB6RtLB/jZYxiKMatMLUJIYtcKIp+2z/YtKiWcLnwotB\nWdCjVnPTxpkurmF2fWP/eewZ+wKBgQDRMtJg7etjvKyjYNQ5fARnCc+XsI3gkBe1\nX9oeXfhyfZFeBXWnZzN1ITgFHplDznmBdxAyYGiQdbbkdKQSghviUQ0igBvoDMYy\n1rWcy+a17Mj98uyNEfmb3X2cC6WpvOZaGHwg9+GY67BThwI3FqHIbyk6Ko09WlTX\nQpRQjMzU7QKBgAfi1iflu+q0LR+3a3vvFCiaToskmZiD7latd9AKk2ocsBd3Woy9\n+hXXecJHPOKV4oUJlJgvAZqe5HGBqEoTEK0wyPNLSQlO/9ypd+0fEnArwFHO7CMF\nycQprAKHJXM1eOOFFuZeQCaInqdPZy1UcV5Szla4UmUZWkk1m24blHzXAoGBAMcA\nyH4qdbxX9AYrC1dvsSRvgcnzytMvX05LU0uF6tzGtG0zVlub4ahvpEHCfNuy44UT\nxRWW/oFFaWjjyFxO5sWggpUqNuHEnRopg3QXx22SRRTGbN45li/+QAocTkgsiRh1\nqEcYZsO4mPCsQqAy6E2p6RcK+Xa+omxvSnVhq0x1AoGAKr8GdkCl4CF6rieLMAQ7\nLNBuuoYGaHoh8l5E2uOQpzwxVy/nMBcAv+2+KqHEzHryUv1owOi6pMLv7A9mTFoS\n18B0QRLuz5fSOsVnmldfC9fpUc6H8cH1SINZpzajqQA74bPwELJjnzrCnH79TnHG\nJuElxA33rFEjbgbzdyrE768=\n-----END PRIVATE KEY-----\n",
    "client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
    "client_id": "106625872877651920064",
    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
    "token_uri": "https://oauth2.googleapis.com/token",
    "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
    "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
}

gc = gspread.service_account_from_dict(credentials)

st.set_page_config(layout="wide")

# Display formats for percentage columns
game_format = {'Win Percentage': '{:.2%}', 'First Inning Lead Percentage': '{:.2%}',
               'Fifth Inning Lead Percentage': '{:.2%}', '8+ runs': '{:.2%}',
               'DK LevX': '{:.2%}', 'FD LevX': '{:.2%}'}

team_roo_format = {'Top Score%': '{:.2%}', '0 Runs': '{:.2%}', '1 Run': '{:.2%}', '2 Runs': '{:.2%}',
                   '3 Runs': '{:.2%}', '4 Runs': '{:.2%}', '5 Runs': '{:.2%}', '6 Runs': '{:.2%}',
                   '7 Runs': '{:.2%}', '8 Runs': '{:.2%}', '9 Runs': '{:.2%}', '10 Runs': '{:.2%}'}

# Team abbreviations that differ between sources and their standardized equivalents
wrong_acro = ['WSH', 'AZ', 'CHW']
right_acro = ['WAS', 'ARI', 'CWS']

# Source spreadsheets for the projection data
dk_player_projections = 'https://docs.google.com/spreadsheets/d/1MdzPFqIT0MFid2IhegWf39VNR8IXUyo_Fb5dolOSt3o/edit#gid=340831852'
fd_player_projections = 'https://docs.google.com/spreadsheets/d/1MdzPFqIT0MFid2IhegWf39VNR8IXUyo_Fb5dolOSt3o/edit#gid=340831852'
secondary_dk_player_projections = 'https://docs.google.com/spreadsheets/d/1lP4t8N7UhjR94MEwPn6powRyLl_cQBDUMSCs6cbL9ms/edit#gid=340831852'
secondary_fd_player_projections = 'https://docs.google.com/spreadsheets/d/1lP4t8N7UhjR94MEwPn6powRyLl_cQBDUMSCs6cbL9ms/edit#gid=340831852'
all_dk_player_projections = 'https://docs.google.com/spreadsheets/d/1f42Ergav8K1VsOLOK9MUn7DM_MLMvv4GR2Fy7EfnZTc/edit#gid=500994479'
all_fd_player_projections = 'https://docs.google.com/spreadsheets/d/1f42Ergav8K1VsOLOK9MUn7DM_MLMvv4GR2Fy7EfnZTc/edit#gid=500994479'
final_Proj = 0


@st.cache_data
def load_time():
    # Read the last-update timestamp from the projections sheet
    sh = gc.open_by_url(dk_player_projections)
    worksheet = sh.worksheet('Timestamp')
    raw_stamp = worksheet.acell('A1').value
    t_stamp = f"Last update was at {raw_stamp}"
    return t_stamp


@st.cache_data
def set_slate_teams():
    # Load the site/slate table and standardize the FanDuel team abbreviations
    sh = gc.open_by_url(all_dk_player_projections)
    worksheet = sh.worksheet('Site_Info')
    raw_display = pd.DataFrame(worksheet.get_all_records())
    for col in ['FD Main', 'FD Secondary', 'FD Overall']:
        raw_display[col] = raw_display[col].replace(wrong_acro, right_acro)
    return raw_display


@st.cache_data
def load_dk_player_projections(URL):
    # Load DraftKings projections, dropping rows with no projection or no player name
    sh = gc.open_by_url(URL)
    worksheet = sh.worksheet('DK_Projections')
    load_display = pd.DataFrame(worksheet.get_all_records())
    load_display.replace('', np.nan, inplace=True)
    raw_display = load_display.dropna(subset=['Median'])
    raw_display = raw_display.dropna(subset=['Player'])
    return raw_display


@st.cache_data
def load_fd_player_projections(URL):
    # Load FanDuel projections, dropping rows with no projection or no player name
    sh = gc.open_by_url(URL)
    worksheet = sh.worksheet('FD_Projections')
    load_display = pd.DataFrame(worksheet.get_all_records())
    load_display.replace('', np.nan, inplace=True)
    raw_display = load_display.dropna(subset=['Median'])
    raw_display = raw_display.dropna(subset=['Player'])
    return raw_display


@st.cache_data
def convert_df_to_csv(df):
    return df.to_csv().encode('utf-8')


t_stamp = load_time()
site_slates = set_slate_teams()

col1, col2 = st.columns([1, 5])

with col1:
    st.info(t_stamp)
    if st.button("Load/Reset Data", key='reset4'):
        st.cache_data.clear()

    slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'All Games'), key='slate_var1')
    site_var2 = st.radio("What site are you playing?", ('Draftkings', 'Fanduel'), key='site_var2')

    # Pull projections for the chosen site and restrict them to the teams on the chosen slate
    if slate_var1 == 'Main Slate':
        if site_var2 == 'Draftkings':
            slate_teams = site_slates['DK Main'].values.tolist()
            raw_baselines = load_dk_player_projections(all_dk_player_projections)
            raw_baselines = raw_baselines[raw_baselines['Team'].isin(slate_teams)]
        elif site_var2 == 'Fanduel':
            slate_teams = site_slates['FD Main'].values.tolist()
            raw_baselines = load_fd_player_projections(all_fd_player_projections)
            raw_baselines = raw_baselines[raw_baselines['Team'].isin(slate_teams)]
    elif slate_var1 == 'Secondary Slate':
        if site_var2 == 'Draftkings':
            slate_teams = site_slates['DK Secondary'].values.tolist()
            raw_baselines = load_dk_player_projections(all_dk_player_projections)
            raw_baselines = raw_baselines[raw_baselines['Team'].isin(slate_teams)]
        elif site_var2 == 'Fanduel':
            slate_teams = site_slates['FD Secondary'].values.tolist()
            raw_baselines = load_fd_player_projections(all_fd_player_projections)
            raw_baselines = raw_baselines[raw_baselines['Team'].isin(slate_teams)]
    elif slate_var1 == 'All Games':
        if site_var2 == 'Draftkings':
            slate_teams = site_slates['DK Overall'].values.tolist()
            raw_baselines = load_dk_player_projections(all_dk_player_projections)
            raw_baselines = raw_baselines[raw_baselines['Team'].isin(slate_teams)]
        elif site_var2 == 'Fanduel':
            slate_teams = site_slates['FD Overall'].values.tolist()
            raw_baselines = load_fd_player_projections(all_fd_player_projections)
            raw_baselines = raw_baselines[raw_baselines['Team'].isin(slate_teams)]

    split_var2 = st.radio("Would you like to run stack analysis for the full slate or individual teams?", ('Full Slate Run', 'Specific Teams'), key='split_var2')
    if split_var2 == 'Specific Teams':
        team_var2 = st.multiselect('Which teams would you like to include in the analysis?', options=raw_baselines['Team'].unique(), key='team_var2')
    elif split_var2 == 'Full Slate Run':
        team_var2 = raw_baselines.Team.unique().tolist()

    pos_split2 = st.radio("Are you viewing all positions, specific groups, or specific positions?", ('All Positions', 'Specific Positions'), key='pos_split2')
    if pos_split2 == 'Specific Positions':
        pos_var2 = st.multiselect('What Positions would you like to view?', options=['SP', 'P', 'C', '1B', '2B', '3B', 'SS', 'OF'])
    elif pos_split2 == 'All Positions':
        pos_var2 = 'All'

    if site_var2 == 'Draftkings':
        max_sal2 = st.number_input('Max Salary', min_value=5000, max_value=50000, value=35000, step=100, key='max_sal2')
    elif site_var2 == 'Fanduel':
        max_sal2 = st.number_input('Max Salary', min_value=5000, max_value=35000, value=25000, step=100, key='max_sal2')

    size_var2 = st.selectbox('What size of stacks are you analyzing?', options=['3-man', '4-man', '5-man'])
    if size_var2 == '3-man':
        stack_size = 3
    if size_var2 == '4-man':
        stack_size = 4
    if size_var2 == '5-man':
        stack_size = 5

    # Lookup tables keyed by player name
    team_dict = dict(zip(raw_baselines.Player, raw_baselines.Team))
    proj_dict = dict(zip(raw_baselines.Player, raw_baselines.Median))
    own_dict = dict(zip(raw_baselines.Player, raw_baselines.Own))
    cost_dict = dict(zip(raw_baselines.Player, raw_baselines.Salary))

with col2:
    stack_hold_container = st.empty()
    if st.button('Run stack analysis'):
        comb_list = []

        # Optionally restrict the player pool to the selected positions
        if pos_split2 != 'All Positions':
            raw_baselines = raw_baselines[raw_baselines['Position'].str.contains('|'.join(pos_var2))]

        # Enumerate every possible stack of the chosen size for each selected team (pitchers excluded)
        for cur_team in team_var2:
            working_baselines = raw_baselines
            working_baselines = working_baselines[working_baselines['Team'] == cur_team]
            working_baselines = working_baselines[working_baselines['Position'] != 'SP']
            working_baselines = working_baselines[working_baselines['Position'] != 'P']
            order_list = working_baselines['Player']
            comb = combinations(order_list, stack_size)
            for i in list(comb):
                comb_list.append(i)

        # Columns 0..stack_size-1 hold the player names; Team, Proj, Salary, and Own% follow them
        comb_DF = pd.DataFrame(comb_list)
        comb_DF['Team'] = comb_DF[0].map(team_dict)
        comb_DF['Proj'] = sum(comb_DF[col].map(proj_dict) for col in range(stack_size))
        comb_DF['Salary'] = sum(comb_DF[col].map(cost_dict) for col in range(stack_size))
        comb_DF['Own%'] = sum(comb_DF[col].map(own_dict) for col in range(stack_size))
        # Rank stacks by projection, enforce the salary cap, and realign the index so the
        # positional pruning pass below drops the rows it actually inspects
        comb_DF = comb_DF.sort_values(by='Proj', ascending=False)
        comb_DF = comb_DF.loc[comb_DF['Salary'] <= max_sal2]
        comb_DF = comb_DF.reset_index(drop=True)

        # Walk the projection-sorted stacks and keep a stack only if its ownership is at or below
        # the lowest ownership among the higher-projected stacks already kept, leaving the
        # projection/ownership frontier. Own% sits in positional column stack_size + 3.
        own_col = stack_size + 3
        cut_var = 0
        while cut_var < len(comb_DF):
            try:
                if cut_var == 0:
                    cur_own = float(comb_DF.iat[cut_var, own_col])
                else:
                    check_own = float(comb_DF.iat[cut_var, own_col])
                    if check_own > cur_own:
                        # Lower projection and higher ownership than a kept stack: no leverage, drop it
                        comb_DF = comb_DF.drop([cut_var])
                        comb_DF = comb_DF.reset_index(drop=True)
                        cut_var -= 1
                    else:
                        cur_own = check_own
                cut_var += 1
            except:
                # Guard against the positional index running past the end after drops
                cut_var += 1

        with stack_hold_container:
            stack_hold_container = st.empty()
            st.dataframe(comb_DF.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width=True)
            st.download_button(
                label="Export Tables",
                data=convert_df_to_csv(comb_DF),
                file_name='MLB_Stack_Options_export.csv',
                mime='text/csv',
            )