import streamlit as st
st.set_page_config(layout="wide")
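
# Clear every non-underscore global so a rerun starts from a clean namespace;
# the repeated imports below restore the modules this loop deletes.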
for name in dir():
    if not name.startswith('_'):
        del globals()[name]
import pulp
import numpy as np
import pandas as pd
import streamlit as st
import gspread
from itertools import combinations
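
# Cached Google Sheets client built from the service-account credentials below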
@st.cache_resource
def init_conn():
    scope = ['https://www.googleapis.com/auth/spreadsheets',
             "https://www.googleapis.com/auth/drive"]
    credentials = {
"type": "service_account",
"project_id": "sheets-api-connect-378620",
"private_key_id": "1005124050c80d085e2c5b344345715978dd9cc9",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCtKa01beXwc88R\nnPZVQTNPVQuBnbwoOfc66gW3547ja/UEyIGAF112dt/VqHprRafkKGmlg55jqJNt\na4zceLKV+wTm7vBu7lDISTJfGzCf2TrxQYNqwMKE2LOjI69dBM8u4Dcb4k0wcp9v\ntW1ZzLVVuwTvmrg7JBHjiSaB+x5wxm/r3FOiJDXdlAgFlytzqgcyeZMJVKKBQHyJ\njEGg/1720A0numuOCt71w/2G0bDmijuj1e6tH32MwRWcvRNZ19K9ssyDz2S9p68s\nYDhIxX69OWxwScTIHLY6J2t8txf/XMivL/636fPlDADvBEVTdlT606n8CcKUVQeq\npUVdG+lfAgMBAAECggEAP38SUA7B69eTfRpo658ycOs3Amr0JW4H/bb1rNeAul0K\nZhwd/HnU4E07y81xQmey5kN5ZeNrD5EvqkZvSyMJHV0EEahZStwhjCfnDB/cxyix\nZ+kFhv4y9eK+kFpUAhBy5nX6T0O+2T6WvzAwbmbVsZ+X8kJyPuF9m8ldcPlD0sce\ntj8NwVq1ys52eosqs7zi2vjt+eMcaY393l4ls+vNq8Yf27cfyFw45W45CH/97/Nu\n5AmuzlCOAfFF+z4OC5g4rei4E/Qgpxa7/uom+BVfv9G0DIGW/tU6Sne0+37uoGKt\nW6DzhgtebUtoYkG7ZJ05BTXGp2lwgVcNRoPwnKJDxQKBgQDT5wYPUBDW+FHbvZSp\nd1m1UQuXyerqOTA9smFaM8sr/UraeH85DJPEIEk8qsntMBVMhvD3Pw8uIUeFNMYj\naLmZFObsL+WctepXrVo5NB6RtLB/jZYxiKMatMLUJIYtcKIp+2z/YtKiWcLnwotB\nWdCjVnPTxpkurmF2fWP/eewZ+wKBgQDRMtJg7etjvKyjYNQ5fARnCc+XsI3gkBe1\nX9oeXfhyfZFeBXWnZzN1ITgFHplDznmBdxAyYGiQdbbkdKQSghviUQ0igBvoDMYy\n1rWcy+a17Mj98uyNEfmb3X2cC6WpvOZaGHwg9+GY67BThwI3FqHIbyk6Ko09WlTX\nQpRQjMzU7QKBgAfi1iflu+q0LR+3a3vvFCiaToskmZiD7latd9AKk2ocsBd3Woy9\n+hXXecJHPOKV4oUJlJgvAZqe5HGBqEoTEK0wyPNLSQlO/9ypd+0fEnArwFHO7CMF\nycQprAKHJXM1eOOFFuZeQCaInqdPZy1UcV5Szla4UmUZWkk1m24blHzXAoGBAMcA\nyH4qdbxX9AYrC1dvsSRvgcnzytMvX05LU0uF6tzGtG0zVlub4ahvpEHCfNuy44UT\nxRWW/oFFaWjjyFxO5sWggpUqNuHEnRopg3QXx22SRRTGbN45li/+QAocTkgsiRh1\nqEcYZsO4mPCsQqAy6E2p6RcK+Xa+omxvSnVhq0x1AoGAKr8GdkCl4CF6rieLMAQ7\nLNBuuoYGaHoh8l5E2uOQpzwxVy/nMBcAv+2+KqHEzHryUv1owOi6pMLv7A9mTFoS\n18B0QRLuz5fSOsVnmldfC9fpUc6H8cH1SINZpzajqQA74bPwELJjnzrCnH79TnHG\nJuElxA33rFEjbgbzdyrE768=\n-----END PRIVATE KEY-----\n",
"client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
"client_id": "106625872877651920064",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
}
    gc = gspread.service_account_from_dict(credentials)
    return gc
gspreadcon = init_conn()
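
# Percentage formatting applied to the player ROO display columns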
player_roo_format = {'Top_finish': '{:.2%}', 'Top_5_finish': '{:.2%}', 'Top_10_finish': '{:.2%}', '20+%': '{:.2%}',
                     '2x%': '{:.2%}', '3x%': '{:.2%}', '4x%': '{:.2%}'}
all_dk_player_projections = 'https://docs.google.com/spreadsheets/d/1NmKa-b-2D3w7rRxwMPSchh31GKfJ1XcDI2GU8rXWnHI/edit#gid=1401252991'
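
# Read the player, line-combo, and power-play ROO worksheets plus the update timestamp
# from the projections spreadsheet; cached for 10 minutes and cleared by the
# "Load/Reset Data" buttons via st.cache_data.clear().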
@st.cache_data(ttl=600)
def player_stat_table():
    # All worksheets live in the same spreadsheet, so open it once and reuse the handle
    sh = gspreadcon.open_by_url(all_dk_player_projections)
    worksheet = sh.worksheet('Player_Level_ROO')
    player_frame = pd.DataFrame(worksheet.get_all_records())
    worksheet = sh.worksheet('Player_Lines_ROO')
    line_frame = pd.DataFrame(worksheet.get_all_records())
    worksheet = sh.worksheet('Player_PowerPlay_ROO')
    pp_frame = pd.DataFrame(worksheet.get_all_records())
    worksheet = sh.worksheet('Timestamp')
    timestamp = worksheet.acell('A1').value
    return player_frame, line_frame, pp_frame, timestamp
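
# Serialize a filtered table to UTF-8 CSV for the download buttons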
@st.cache_data
def convert_df_to_csv(df):
    return df.to_csv().encode('utf-8')
player_frame, line_frame, pp_frame, timestamp = player_stat_table()
t_stamp = f"Last Update: " + str(timestamp) + f" CST"
tab1, tab2, tab3 = st.tabs(["Player Range of Outcomes", "Line Combo Range of Outcomes", "Power Play Range of Outcomes"])
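
# Tab 1: Player Range of Outcomes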
with tab1:
    col1, col2 = st.columns([1, 7])

    with col1:
        st.info(t_stamp)
        if st.button("Load/Reset Data", key='reset1'):
            # Clearing the data cache forces player_stat_table() to re-read the sheet
            st.cache_data.clear()
            player_frame, line_frame, pp_frame, timestamp = player_stat_table()
            t_stamp = f"Last Update: {timestamp} CST"
        site_var1 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var1')
        split_var1 = st.radio("Would you like to view the whole slate or just specific games?", ('Full Slate Run', 'Specific Games'), key='split_var1')
        if split_var1 == 'Specific Games':
            team_var1 = st.multiselect('Which teams would you like to include in the ROO?', options=player_frame['Team'].unique(), key='team_var1')
        elif split_var1 == 'Full Slate Run':
            team_var1 = player_frame.Team.values.tolist()
        pos_split1 = st.radio("Are you viewing all positions, specific groups, or specific positions?", ('All Positions', 'Specific Positions'), key='pos_split1')
        if pos_split1 == 'Specific Positions':
            pos_var1 = st.multiselect('What Positions would you like to view?', options=['C', 'W', 'D', 'G'])
        elif pos_split1 == 'All Positions':
            pos_var1 = 'All'
        # Salary filter: the default range must stay within the slider's 2000-10000 bounds
        sal_var1 = st.slider("Is there a certain price range you want to view?", 2000, 10000, (2000, 10000), key='sal_var1')
    with col2:
        # Filter to the chosen site, basic projections, selected teams, salary window, and positions
        final_Proj = player_frame[player_frame['Site'] == str(site_var1)]
        final_Proj = final_Proj[final_Proj['Type'] == 'Basic']
        final_Proj = final_Proj[final_Proj['Team'].isin(team_var1)]
        final_Proj = final_Proj[final_Proj['Salary'] >= sal_var1[0]]
        final_Proj = final_Proj[final_Proj['Salary'] <= sal_var1[1]]
        if pos_var1 != 'All':
            final_Proj = final_Proj[final_Proj['Position'].str.contains('|'.join(pos_var1))]
        final_Proj = final_Proj.set_index('Player')
        final_Proj = final_Proj.sort_values(by='Median', ascending=False)

        st.dataframe(final_Proj.iloc[:, :-3].style.background_gradient(axis=0, cmap='RdYlGn').format(player_roo_format, precision=2), use_container_width=True)
        st.download_button(
            label="Export Tables",
            data=convert_df_to_csv(final_Proj),
            file_name='NHL_player_export.csv',
            mime='text/csv',
        )
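
# Tab 2: Line Combo Range of Outcomes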
with tab2:
    col1, col2 = st.columns([1, 7])

    with col1:
        st.info(t_stamp)
        if st.button("Load/Reset Data", key='reset2'):
            st.cache_data.clear()
            player_frame, line_frame, pp_frame, timestamp = player_stat_table()
            t_stamp = f"Last Update: {timestamp} CST"
        site_var2 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var2')

    with col2:
        final_line_combos = line_frame[line_frame['Site'] == str(site_var2)]
        final_line_combos = final_line_combos[final_line_combos['Type'] == 'Basic']
        final_line_combos = final_line_combos.sort_values(by='Median', ascending=False)

        st.dataframe(final_line_combos.iloc[:, :-3].style.background_gradient(axis=0, cmap='RdYlGn').format(precision=2), use_container_width=True)
        st.download_button(
            label="Export Tables",
            data=convert_df_to_csv(final_line_combos),
            file_name='NHL_linecombos_export.csv',
            mime='text/csv',
        )
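
# Tab 3: Power Play Range of Outcomes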
with tab3:
    col1, col2 = st.columns([1, 7])

    with col1:
        st.info(t_stamp)
        if st.button("Load/Reset Data", key='reset3'):
            st.cache_data.clear()
            player_frame, line_frame, pp_frame, timestamp = player_stat_table()
            t_stamp = f"Last Update: {timestamp} CST"
        site_var3 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var3')

    with col2:
        final_pp_combos = pp_frame[pp_frame['Site'] == str(site_var3)]
        final_pp_combos = final_pp_combos[final_pp_combos['Type'] == 'Basic']
        final_pp_combos = final_pp_combos.sort_values(by='Median', ascending=False)

        st.dataframe(final_pp_combos.iloc[:, :-3].style.background_gradient(axis=0, cmap='RdYlGn').format(precision=2), use_container_width=True)
        st.download_button(
            label="Export Tables",
            data=convert_df_to_csv(final_pp_combos),
            file_name='NHL_powerplay_export.csv',
            mime='text/csv',
        )