# NBA_DFS_ROO / app.py
import streamlit as st
import numpy as np
import pandas as pd
import gspread
import pymongo
st.set_page_config(layout="wide")
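# Cached connections: two Google Sheets service accounts (the second acts as a fallback
# in load_overall_stats) plus the MongoDB client that stores the lineup seed frames.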
@st.cache_resource
def init_conn():
scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive']
credentials = {
"type": "service_account",
"project_id": "model-sheets-connect",
"private_key_id": st.secrets['model_sheets_connect_pk'],
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDiu1v/e6KBKOcK\ncx0KQ23nZK3ZVvADYy8u/RUn/EDI82QKxTd/DizRLIV81JiNQxDJXSzgkbwKYEDm\n48E8zGvupU8+Nk76xNPakrQKy2Y8+VJlq5psBtGchJTuUSHcXU5Mg2JhQsB376PJ\nsCw552K6Pw8fpeMDJDZuxpKSkaJR6k9G5Dhf5q8HDXnC5Rh/PRFuKJ2GGRpX7n+2\nhT/sCax0J8jfdTy/MDGiDfJqfQrOPrMKELtsGHR9Iv6F4vKiDqXpKfqH+02E9ptz\nBk+MNcbZ3m90M8ShfRu28ebebsASfarNMzc3dk7tb3utHOGXKCf4tF8yYKo7x8BZ\noO9X4gSfAgMBAAECggEAU8ByyMpSKlTCF32TJhXnVJi/kS+IhC/Qn5JUDMuk4LXr\naAEWsWO6kV/ZRVXArjmuSzuUVrXumISapM9Ps5Ytbl95CJmGDiLDwRL815nvv6k3\nUyAS8EGKjz74RpoIoH6E7EWCAzxlnUgTn+5oP9Flije97epYk3H+e2f1f5e1Nn1d\nYNe8U+1HqJgILcxA1TAUsARBfoD7+K3z/8DVPHI8IpzAh6kTHqhqC23Rram4XoQ6\nzj/ZdVBjvnKuazETfsD+Vl3jGLQA8cKQVV70xdz3xwLcNeHsbPbpGBpZUoF73c65\nkAXOrjYl0JD5yAk+hmYhXr6H9c6z5AieuZGDrhmlFQKBgQDzV6LRXmjn4854DP/J\nI82oX2GcI4eioDZPRukhiQLzYerMQBmyqZIRC+/LTCAhYQSjNgMa+ZKyvLqv48M0\n/x398op/+n3xTs+8L49SPI48/iV+mnH7k0WI/ycd4OOKh8rrmhl/0EWb9iitwJYe\nMjTV/QxNEpPBEXfR1/mvrN/lVQKBgQDuhomOxUhWVRVH6x03slmyRBn0Oiw4MW+r\nrt1hlNgtVmTc5Mu+4G0USMZwYuOB7F8xG4Foc7rIlwS7Ic83jMJxemtqAelwOLdV\nXRLrLWJfX8+O1z/UE15l2q3SUEnQ4esPHbQnZowHLm0mdL14qSVMl1mu1XfsoZ3z\nJZTQb48CIwKBgEWbzQRtKD8lKDupJEYqSrseRbK/ax43DDITS77/DWwHl33D3FYC\nMblUm8ygwxQpR4VUfwDpYXBlklWcJovzamXpSnsfcYVkkQH47NuOXPXPkXQsw+w+\nDYcJzeu7F/vZqk9I7oBkWHUrrik9zPNoUzrfPvSRGtkAoTDSwibhoc5dAoGBAMHE\nK0T/ANeZQLNuzQps6S7G4eqjwz5W8qeeYxsdZkvWThOgDd/ewt3ijMnJm5X05hOn\ni4XF1euTuvUl7wbqYx76Wv3/1ZojiNNgy7ie4rYlyB/6vlBS97F4ZxJdxMlabbCW\n6b3EMWa4EVVXKoA1sCY7IVDE+yoQ1JYsZmq45YzPAoGBANWWHuVueFGZRDZlkNlK\nh5OmySmA0NdNug3G1upaTthyaTZ+CxGliwBqMHAwpkIRPwxUJpUwBTSEGztGTAxs\nWsUOVWlD2/1JaKSmHE8JbNg6sxLilcG6WEDzxjC5dLL1OrGOXj9WhC9KX3sq6qb6\nF/j9eUXfXjAlb042MphoF3ZC\n-----END PRIVATE KEY-----\n",
"client_email": "[email protected]",
"client_id": "100369174533302798535",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40model-sheets-connect.iam.gserviceaccount.com"
}
credentials2 = {
"type": "service_account",
"project_id": "sheets-api-connect-378620",
"private_key_id": st.secrets['sheets_api_connect_pk'],
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCtKa01beXwc88R\nnPZVQTNPVQuBnbwoOfc66gW3547ja/UEyIGAF112dt/VqHprRafkKGmlg55jqJNt\na4zceLKV+wTm7vBu7lDISTJfGzCf2TrxQYNqwMKE2LOjI69dBM8u4Dcb4k0wcp9v\ntW1ZzLVVuwTvmrg7JBHjiSaB+x5wxm/r3FOiJDXdlAgFlytzqgcyeZMJVKKBQHyJ\njEGg/1720A0numuOCt71w/2G0bDmijuj1e6tH32MwRWcvRNZ19K9ssyDz2S9p68s\nYDhIxX69OWxwScTIHLY6J2t8txf/XMivL/636fPlDADvBEVTdlT606n8CcKUVQeq\npUVdG+lfAgMBAAECggEAP38SUA7B69eTfRpo658ycOs3Amr0JW4H/bb1rNeAul0K\nZhwd/HnU4E07y81xQmey5kN5ZeNrD5EvqkZvSyMJHV0EEahZStwhjCfnDB/cxyix\nZ+kFhv4y9eK+kFpUAhBy5nX6T0O+2T6WvzAwbmbVsZ+X8kJyPuF9m8ldcPlD0sce\ntj8NwVq1ys52eosqs7zi2vjt+eMcaY393l4ls+vNq8Yf27cfyFw45W45CH/97/Nu\n5AmuzlCOAfFF+z4OC5g4rei4E/Qgpxa7/uom+BVfv9G0DIGW/tU6Sne0+37uoGKt\nW6DzhgtebUtoYkG7ZJ05BTXGp2lwgVcNRoPwnKJDxQKBgQDT5wYPUBDW+FHbvZSp\nd1m1UQuXyerqOTA9smFaM8sr/UraeH85DJPEIEk8qsntMBVMhvD3Pw8uIUeFNMYj\naLmZFObsL+WctepXrVo5NB6RtLB/jZYxiKMatMLUJIYtcKIp+2z/YtKiWcLnwotB\nWdCjVnPTxpkurmF2fWP/eewZ+wKBgQDRMtJg7etjvKyjYNQ5fARnCc+XsI3gkBe1\nX9oeXfhyfZFeBXWnZzN1ITgFHplDznmBdxAyYGiQdbbkdKQSghviUQ0igBvoDMYy\n1rWcy+a17Mj98uyNEfmb3X2cC6WpvOZaGHwg9+GY67BThwI3FqHIbyk6Ko09WlTX\nQpRQjMzU7QKBgAfi1iflu+q0LR+3a3vvFCiaToskmZiD7latd9AKk2ocsBd3Woy9\n+hXXecJHPOKV4oUJlJgvAZqe5HGBqEoTEK0wyPNLSQlO/9ypd+0fEnArwFHO7CMF\nycQprAKHJXM1eOOFFuZeQCaInqdPZy1UcV5Szla4UmUZWkk1m24blHzXAoGBAMcA\nyH4qdbxX9AYrC1dvsSRvgcnzytMvX05LU0uF6tzGtG0zVlub4ahvpEHCfNuy44UT\nxRWW/oFFaWjjyFxO5sWggpUqNuHEnRopg3QXx22SRRTGbN45li/+QAocTkgsiRh1\nqEcYZsO4mPCsQqAy6E2p6RcK+Xa+omxvSnVhq0x1AoGAKr8GdkCl4CF6rieLMAQ7\nLNBuuoYGaHoh8l5E2uOQpzwxVy/nMBcAv+2+KqHEzHryUv1owOi6pMLv7A9mTFoS\n18B0QRLuz5fSOsVnmldfC9fpUc6H8cH1SINZpzajqQA74bPwELJjnzrCnH79TnHG\nJuElxA33rFEjbgbzdyrE768=\n-----END PRIVATE KEY-----\n",
"client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
"client_id": "106625872877651920064",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
}
uri = st.secrets['mongo_uri']
client = pymongo.MongoClient(uri, retryWrites=True, serverSelectionTimeoutMS=500000)
db = client["NBA_DFS"]
NBA_Data = st.secrets['NBA_Data']
gc = gspread.service_account_from_dict(credentials)
gc2 = gspread.service_account_from_dict(credentials2)
return gc, gc2, db, NBA_Data
gcservice_account, gcservice_account2, db, NBA_Data = init_conn()
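# Column order expected from the seed frames: roster slots first, then lineup-level
# salary, projection, team/stack counts, and ownership ('Own').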
dk_columns = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'FLEX', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count', 'Own']
fd_columns = ['PG1', 'PG2', 'SG1', 'SG2', 'SF1', 'SF2', 'PF1', 'PF2', 'C1', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count', 'Own']
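# Pulls the projection tables (DK/FD main and secondary builds, player-level ROO, and
# the ROO backlog) from the Google Sheet, switching to the second service account if
# the first one fails (e.g. quota or rate limits).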
@st.cache_data(ttl=300)
def load_overall_stats():
try:
sh = gcservice_account.open_by_url(NBA_Data)
    except Exception:
sh = gcservice_account2.open_by_url(NBA_Data)
worksheet = sh.worksheet('DK_Build_Up')
raw_display = pd.DataFrame(worksheet.get_all_records())
raw_display.rename(columns={"Name": "Player", "Nickname": "Player", "Fantasy": "Median"}, inplace = True)
raw_display = raw_display.loc[raw_display['Salary'] > 0]
raw_display = raw_display.loc[raw_display['Median'] > 0]
raw_display = raw_display.apply(pd.to_numeric, errors='ignore')
dk_raw = raw_display.sort_values(by='Median', ascending=False)
worksheet = sh.worksheet('FD_Build_Up')
raw_display = pd.DataFrame(worksheet.get_all_records())
raw_display.rename(columns={"Name": "Player", "Nickname": "Player", "Fantasy": "Median"}, inplace = True)
raw_display = raw_display.loc[raw_display['Median'] > 0]
raw_display = raw_display.apply(pd.to_numeric, errors='ignore')
fd_raw = raw_display.sort_values(by='Median', ascending=False)
worksheet = sh.worksheet('Secondary_DK_Build')
raw_display = pd.DataFrame(worksheet.get_all_records())
raw_display.rename(columns={"Name": "Player", "Nickname": "Player", "Fantasy": "Median"}, inplace = True)
raw_display = raw_display.loc[raw_display['Median'] > 0]
raw_display = raw_display.apply(pd.to_numeric, errors='ignore')
dk_raw_sec = raw_display.sort_values(by='Median', ascending=False)
worksheet = sh.worksheet('Secondary_FD_Build')
raw_display = pd.DataFrame(worksheet.get_all_records())
raw_display.rename(columns={"Name": "Player", "Nickname": "Player", "Fantasy": "Median"}, inplace = True)
raw_display = raw_display.loc[raw_display['Median'] > 0]
raw_display = raw_display.apply(pd.to_numeric, errors='ignore')
fd_raw_sec = raw_display.sort_values(by='Median', ascending=False)
worksheet = sh.worksheet('Player_Level_ROO')
raw_display = pd.DataFrame(worksheet.get_all_records())
raw_display = raw_display.loc[raw_display['Median'] > 0]
raw_display = raw_display.apply(pd.to_numeric, errors='ignore')
roo_raw = raw_display.sort_values(by='Median', ascending=False)
timestamp = raw_display['timestamp'].values[0]
worksheet = sh.worksheet('ROO_Backlog')
raw_display = pd.DataFrame(worksheet.get_all_records())
roo_backlog = raw_display.sort_values(by='Date', ascending=False)
roo_backlog = roo_backlog[roo_backlog['slate'] == 'Main Slate']
return dk_raw, fd_raw, dk_raw_sec, fd_raw_sec, roo_raw, timestamp, roo_backlog
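# Seed lineups are read from MongoDB and returned as plain numpy arrays so the
# player-filter masks in the Optimals tab can operate on them positionally.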
@st.cache_data(ttl = 300)
def init_DK_lineups():
collection = db["DK_NBA_seed_frame"]
cursor = collection.find().limit(10000)
raw_display = pd.DataFrame(list(cursor))
raw_display = raw_display[['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'FLEX', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count', 'Own']]
DK_seed = raw_display.to_numpy()
return DK_seed
@st.cache_data(ttl = 300)
def init_FD_lineups():
collection = db["FD_NBA_seed_frame"]
cursor = collection.find().limit(10000)
raw_display = pd.DataFrame(list(cursor))
raw_display = raw_display[['PG1', 'PG2', 'SG1', 'SG2', 'SF1', 'SF2', 'PF1', 'PF2', 'C1', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count', 'Own']]
FD_seed = raw_display.to_numpy()
return FD_seed
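# CSV export helpers: convert_df_to_csv serializes a DataFrame directly, while
# convert_df labels a numpy lineup array with the module-level column_names
# (dk_columns or fd_columns, set in the Optimals tab) before export.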
def convert_df_to_csv(df):
return df.to_csv().encode('utf-8')
@st.cache_data
def convert_df(array):
array = pd.DataFrame(array, columns=column_names)
return array.to_csv().encode('utf-8')
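# Initial data load. The ROO tables always load from Sheets; if the MongoDB seed
# frames are unavailable, fall back to empty frames so the ROO tab still renders.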
dk_raw, fd_raw, dk_raw_sec, fd_raw_sec, roo_raw, timestamp, roo_backlog = load_overall_stats()
try:
dk_lineups = init_DK_lineups()
fd_lineups = init_FD_lineups()
except Exception:
dk_lineups = pd.DataFrame(columns=dk_columns)
fd_lineups = pd.DataFrame(columns=fd_columns)
t_stamp = f"Last Update: {timestamp} CST"
tab1, tab2 = st.tabs(['Range of Outcomes', 'Optimals'])
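# Tab 1: player range-of-outcomes (ROO) tables with site, slate, date, team, and position filters.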
with tab1:
col1, col2 = st.columns([1, 9])
with col1:
st.info(t_stamp)
if st.button("Load/Reset Data", key='reset1'):
st.cache_data.clear()
dk_raw, fd_raw, dk_raw_sec, fd_raw_sec, roo_raw, timestamp, roo_backlog = load_overall_stats()
dk_lineups = init_DK_lineups()
fd_lineups = init_FD_lineups()
            t_stamp = f"Last Update: {timestamp} CST"
            for key in list(st.session_state.keys()):
                del st.session_state[key]
site_var2 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var2')
if site_var2 == 'Draftkings':
site_baselines = roo_raw[roo_raw['site'] == 'Draftkings']
site_backlog = roo_backlog[roo_backlog['site'] == 'Draftkings']
elif site_var2 == 'Fanduel':
site_baselines = roo_raw[roo_raw['site'] == 'Fanduel']
site_backlog = roo_backlog[roo_backlog['site'] == 'Fanduel']
slate_split = st.radio("Are you viewing the main slate or the secondary slate?", ('Main Slate', 'Secondary', 'Backlog'), key='slate_split')
if slate_split == 'Main Slate':
raw_baselines = site_baselines[site_baselines['slate'] == 'Main Slate']
elif slate_split == 'Secondary':
raw_baselines = site_baselines[site_baselines['slate'] == 'Secondary Slate']
elif slate_split == 'Backlog':
raw_baselines = site_backlog
view_all = st.checkbox("Do you want to view all dates?", key='view_all')
        if view_all:
            raw_baselines = raw_baselines.sort_values(by=['Median', 'Date'], ascending=[False, False])
else:
date_var2 = st.date_input("Which date would you like to view?", key='date_var2')
raw_baselines = raw_baselines[raw_baselines['Date'] == date_var2.strftime('%m-%d-%Y')]
raw_baselines = raw_baselines.sort_values(by='Median', ascending=False)
split_var2 = st.radio("Are you running the full slate or certain games?", ('Full Slate Run', 'Specific Games'), key='split_var2')
if split_var2 == 'Specific Games':
team_var2 = st.multiselect('Which teams would you like to include in the ROO?', options = raw_baselines['Team'].unique(), key='team_var2')
elif split_var2 == 'Full Slate Run':
team_var2 = raw_baselines.Team.values.tolist()
pos_var2 = st.selectbox('View specific position?', options = ['All', 'PG', 'SG', 'SF', 'PF', 'C'], key='pos_var2')
with col2:
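        # Build the ROO display from the selected teams, cache it in session state,
        # and offer a CSV export of exactly what is shown.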
display_container_1 = st.empty()
display_dl_container_1 = st.empty()
display_proj = raw_baselines[raw_baselines['Team'].isin(team_var2)]
display_proj = display_proj.drop(columns=['site', 'version', 'slate', 'timestamp'])
st.session_state.display_proj = display_proj
with display_container_1:
display_container = st.empty()
if 'display_proj' in st.session_state:
                if pos_var2 != 'All':
                    st.session_state.display_proj = st.session_state.display_proj[st.session_state.display_proj['Position'].str.contains(pos_var2)]
st.dataframe(st.session_state.display_proj.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), height=1000, use_container_width = True)
with display_dl_container_1:
display_dl_container = st.empty()
if 'display_proj' in st.session_state:
st.download_button(
label="Export Tables",
data=convert_df_to_csv(st.session_state.display_proj),
file_name='NBA_ROO_export.csv',
mime='text/csv',
)
with tab2:
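    # Tab 2: browse the precomputed optimal lineups, filter to specific players,
    # summarize salary/projection/ownership, and export the displayed set.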
col1, col2 = st.columns([1, 7])
with col1:
if st.button("Load/Reset Data", key='reset2'):
st.cache_data.clear()
dk_raw, fd_raw, dk_raw_sec, fd_raw_sec, roo_raw, timestamp, roo_backlog = load_overall_stats()
dk_lineups = init_DK_lineups()
fd_lineups = init_FD_lineups()
            t_stamp = f"Last Update: {timestamp} CST"
            for key in list(st.session_state.keys()):
                del st.session_state[key]
slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Just the Main Slate'))
site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'))
lineup_num_var = st.number_input("How many lineups do you want to display?", min_value=1, max_value=500, value=10, step=1)
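        # Site-specific setup: pick the projection table and export column labels;
        # ownership sits at index 14 of a DK seed row and index 15 of an FD seed row.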
if site_var1 == 'Draftkings':
raw_baselines = dk_raw
# Get the minimum and maximum ownership values from dk_lineups
min_own = np.min(dk_lineups[:,14])
max_own = np.max(dk_lineups[:,14])
column_names = dk_columns
player_var1 = st.radio("Do you want a frame with specific Players?", ('Full Slate', 'Specific Players'), key='player_var1')
if player_var1 == 'Specific Players':
player_var2 = st.multiselect('Which players do you want?', options = dk_raw['Player'].unique())
elif player_var1 == 'Full Slate':
player_var2 = dk_raw.Player.values.tolist()
elif site_var1 == 'Fanduel':
raw_baselines = fd_raw
min_own = np.min(fd_lineups[:,15])
max_own = np.max(fd_lineups[:,15])
column_names = fd_columns
player_var1 = st.radio("Do you want a frame with specific Players?", ('Full Slate', 'Specific Players'), key='player_var1')
if player_var1 == 'Specific Players':
player_var2 = st.multiselect('Which players do you want?', options = fd_raw['Player'].unique())
elif player_var1 == 'Full Slate':
player_var2 = fd_raw.Player.values.tolist()
if st.button("Prepare data export", key='data_export'):
data_export = st.session_state.working_seed.copy()
st.download_button(
label="Export optimals set",
data=convert_df(data_export),
file_name='NBA_optimals_export.csv',
mime='text/csv',
)
with col2:
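        # working_seed holds the current lineup pool as a numpy array. For 'Specific Players',
        # np.equal.outer(pool, players).any(axis=1).all(axis=1) keeps only lineups in which
        # every selected player fills some roster slot; 'Full Slate' resets to the full seed.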
if site_var1 == 'Draftkings':
            if 'working_seed' in st.session_state:
if player_var1 == 'Specific Players':
st.session_state.working_seed = st.session_state.working_seed[np.equal.outer(st.session_state.working_seed, player_var2).any(axis=1).all(axis=1)]
elif player_var1 == 'Full Slate':
st.session_state.working_seed = dk_lineups.copy()
st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:lineup_num_var], columns=column_names)
elif 'working_seed' not in st.session_state:
                st.session_state.working_seed = dk_lineups.copy()
if player_var1 == 'Specific Players':
st.session_state.working_seed = st.session_state.working_seed[np.equal.outer(st.session_state.working_seed, player_var2).any(axis=1).all(axis=1)]
elif player_var1 == 'Full Slate':
st.session_state.working_seed = dk_lineups.copy()
st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:lineup_num_var], columns=column_names)
elif site_var1 == 'Fanduel':
            if 'working_seed' in st.session_state:
if player_var1 == 'Specific Players':
st.session_state.working_seed = st.session_state.working_seed[np.equal.outer(st.session_state.working_seed, player_var2).any(axis=1).all(axis=1)]
elif player_var1 == 'Full Slate':
st.session_state.working_seed = fd_lineups.copy()
st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:lineup_num_var], columns=column_names)
elif 'working_seed' not in st.session_state:
                st.session_state.working_seed = fd_lineups.copy()
if player_var1 == 'Specific Players':
st.session_state.working_seed = st.session_state.working_seed[np.equal.outer(st.session_state.working_seed, player_var2).any(axis=1).all(axis=1)]
elif player_var1 == 'Full Slate':
st.session_state.working_seed = fd_lineups.copy()
st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:lineup_num_var], columns=column_names)
with st.container():
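            # Reset clears all session state and reseeds the lineup pool for the active site.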
if st.button("Reset Optimals", key='reset3'):
                for key in list(st.session_state.keys()):
                    del st.session_state[key]
if site_var1 == 'Draftkings':
st.session_state.working_seed = dk_lineups.copy()
elif site_var1 == 'Fanduel':
st.session_state.working_seed = fd_lineups.copy()
if 'data_export_display' in st.session_state:
st.dataframe(st.session_state.data_export_display.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), height=500, use_container_width = True)
with st.container():
if 'working_seed' in st.session_state:
# Create a new dataframe with summary statistics
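                # Positional indices follow dk_columns / fd_columns:
                # DK -> salary 8, proj 9, Own 14; FD -> salary 9, proj 10, Own 15.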
if site_var1 == 'Draftkings':
summary_df = pd.DataFrame({
'Metric': ['Min', 'Average', 'Max', 'STDdev'],
'Salary': [
np.min(st.session_state.working_seed[:,8]),
np.mean(st.session_state.working_seed[:,8]),
np.max(st.session_state.working_seed[:,8]),
np.std(st.session_state.working_seed[:,8])
],
'Proj': [
np.min(st.session_state.working_seed[:,9]),
np.mean(st.session_state.working_seed[:,9]),
np.max(st.session_state.working_seed[:,9]),
np.std(st.session_state.working_seed[:,9])
],
'Own': [
np.min(st.session_state.working_seed[:,14]),
np.mean(st.session_state.working_seed[:,14]),
np.max(st.session_state.working_seed[:,14]),
np.std(st.session_state.working_seed[:,14])
]
})
elif site_var1 == 'Fanduel':
summary_df = pd.DataFrame({
'Metric': ['Min', 'Average', 'Max', 'STDdev'],
'Salary': [
np.min(st.session_state.working_seed[:,9]),
np.mean(st.session_state.working_seed[:,9]),
np.max(st.session_state.working_seed[:,9]),
np.std(st.session_state.working_seed[:,9])
],
'Proj': [
np.min(st.session_state.working_seed[:,10]),
np.mean(st.session_state.working_seed[:,10]),
np.max(st.session_state.working_seed[:,10]),
np.std(st.session_state.working_seed[:,10])
],
'Own': [
np.min(st.session_state.working_seed[:,15]),
np.mean(st.session_state.working_seed[:,15]),
np.max(st.session_state.working_seed[:,15]),
np.std(st.session_state.working_seed[:,15])
]
})
# Set the index of the summary dataframe as the "Metric" column
summary_df = summary_df.set_index('Metric')
# Display the summary dataframe
st.subheader("Optimal Statistics")
st.dataframe(summary_df.style.format({
'Salary': '{:.2f}',
'Proj': '{:.2f}',
'Own': '{:.2f}'
}).background_gradient(cmap='RdYlGn', axis=0, subset=['Salary', 'Proj', 'Own']), use_container_width=True)
with st.container():
if 'data_export_display' in st.session_state:
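                # Exposure table: the first 8 (DK) or 9 (FD) columns of the display are the
                # roster slots; count how often each player appears in the displayed lineups.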
if site_var1 == 'Draftkings':
player_columns = st.session_state.data_export_display.iloc[:, :8]
elif site_var1 == 'Fanduel':
player_columns = st.session_state.data_export_display.iloc[:, :9]
# Flatten the DataFrame and count unique values
value_counts = player_columns.values.flatten().tolist()
value_counts = pd.Series(value_counts).value_counts()
percentages = (value_counts / lineup_num_var * 100).round(2)
# Create a DataFrame with the results
summary_df = pd.DataFrame({
'Player': value_counts.index,
'Frequency': value_counts.values,
'Percentage': percentages.values
})
# Sort by frequency in descending order
summary_df = summary_df.sort_values('Frequency', ascending=False)
# Display the table
st.write("Player Frequency Table:")
st.dataframe(summary_df.style.format({'Percentage': '{:.2f}%'}), height=500, use_container_width=True)