James McCool committed · Commit 43df572 · 1 Parent(s): 70d6b20
Add small field preset functionality in app.py: integrate new preset options for lineup targeting, enhancing user experience and providing better control over portfolio management.
Files changed:
- app.py +12 -84
- global_func/small_field_preset.py +20 -0
app.py
CHANGED
@@ -20,6 +20,7 @@ from global_func.load_csv import load_csv
 from global_func.find_csv_mismatches import find_csv_mismatches
 from global_func.trim_portfolio import trim_portfolio
 from global_func.get_portfolio_names import get_portfolio_names
+from global_func.small_field_preset import small_field_preset
 
 freq_format = {'Finish_percentile': '{:.2%}', 'Lineup Edge': '{:.2%}', 'Win%': '{:.2%}'}
 stacking_sports = ['MLB', 'NHL', 'NFL']
@@ -50,7 +51,7 @@ with tab1:
 
     with col1:
         st.subheader("Draftkings/Fanduel CSV")
-        st.info("Upload the player pricing CSV from the site you are playing on
+        st.info("Upload the player pricing CSV from the site you are playing on")
 
         upload_csv_col, csv_template_col = st.columns([3, 1])
         with upload_csv_col:
@@ -80,7 +81,7 @@ with tab1:
 
     with col2:
         st.subheader("Portfolio File")
-        st.info("Go ahead and upload a portfolio file here. Only include player columns
+        st.info("Go ahead and upload a portfolio file here. Only include player columns.")
 
         upload_toggle = st.selectbox("What source are you uploading from?", options=['SaberSim (Just IDs)', 'Draftkings/Fanduel (Names + IDs)', 'Other (Just Names)'])
         if upload_toggle == 'SaberSim (Just IDs)' or upload_toggle == 'Draftkings/Fanduel (Names + IDs)':
@@ -124,7 +125,6 @@ with tab1:
                 st.session_state['portfolio'] = st.session_state['portfolio'].drop(columns=['Stack'])
             else:
                 stack_dict = None
-                st.info("No Stack column found in portfolio")
             if st.session_state['portfolio'] is not None:
                 st.success('Portfolio file loaded successfully!')
                 st.session_state['portfolio'] = st.session_state['portfolio'].apply(lambda x: x.replace(player_wrong_names_mlb, player_right_names_mlb))
@@ -132,7 +132,7 @@ with tab1:
 
     with col3:
         st.subheader("Projections File")
-        st.info("upload a projections file that has 'player_names', 'salary', 'median', 'ownership', and 'captain ownership'
+        st.info("upload a projections file that has 'player_names', 'salary', 'median', 'ownership', and 'captain ownership' columns. Note that the salary for showdown needs to be the FLEX salary, not the captain salary.")
 
         # Create two columns for the uploader and template button
         upload_col, template_col = st.columns([3, 1])
@@ -1000,7 +1000,7 @@ with tab2:
                     parsed_frame = parsed_frame[~parsed_frame['Stack'].isin(stack_remove)]
                 else:
                     parsed_frame = parsed_frame
-                st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False)
+                st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
                 st.session_state['export_merge'] = st.session_state['working_frame'].copy()
 
         with st.expander('Micro Filter Options'):
@@ -1065,11 +1065,10 @@ with tab2:
                 if size_include:
                     parsed_frame = parsed_frame[parsed_frame['Size'].isin(size_include)]
 
-                st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False)
+                st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
                 st.session_state['export_merge'] = st.session_state['working_frame'].copy()
 
         with st.expander('Trimming Options'):
-            st.info("Make sure you filter before trimming if you want to filter, trimming before a filter will reset your portfolio")
             with st.form(key='trim_form'):
                 st.write("Sorting and trimming variables:")
                 perf_var, own_var = st.columns(2)
@@ -1102,85 +1101,14 @@ with tab2:
                 st.session_state['working_frame'] = trim_portfolio(parsed_frame, trim_slack_var, performance_type, own_type, performance_threshold_high, performance_threshold_low, own_threshold_high, own_threshold_low)
                 st.session_state['working_frame'] = st.session_state['working_frame'].sort_values(by='median', ascending=False)
                 st.session_state['export_merge'] = st.session_state['working_frame'].copy()
+        with st.expander('Presets'):
+            with st.form(key='Small Field Preset'):
+                lineup_target = st.number_input("Lineup Target", value=150, min_value=1, step=1)
+                parsed_frame = small_field_preset(st.session_state['working_frame'], lineup_target)
+                st.session_state['working_frame'] = parsed_frame.sort_values(by='Own', ascending=False)
+                st.session_state['export_merge'] = st.session_state['working_frame'].copy()
 
     with col2:
-        # with st.expander("Download options"):
-        #     if stack_dict is not None:
-        #         download_type = st.selectbox("Simple or Advanced Download?", options=['Simple', 'Advanced'], key='download_choice')
-        #         if download_type == 'Simple':
-        #             st.session_state['export_file'] = st.session_state['working_frame'].copy()
-        #             for col in st.session_state['export_file'].columns:
-        #                 if col not in excluded_cols:
-        #                     st.session_state['export_file'][col] = st.session_state['export_file'][col].map(st.session_state['export_dict'])
-        #         else:
-        #             with st.form(key='stack_form'):
-        #                 st.subheader("Stack Count Adjustments")
-        #                 st.info("This allows you to fine tune the stacks that you wish to export. If you want to make sure you don't export any of a specific stack you can 0 it out.")
-        #                 # Create a container for stack value inputs
-        #                 sort_container = st.container()
-        #                 with sort_container:
-        #                     sort_var = st.selectbox("Sort export portfolio by:", options=['median', 'Lineup Edge', 'Own'])
-
-        #                 # Get unique stack values
-        #                 unique_stacks = sorted(list(set(stack_dict.values())))
-
-        #                 # Create a dictionary to store stack multipliers
-        #                 if 'stack_multipliers' not in st.session_state:
-        #                     st.session_state.stack_multipliers = {stack: 0.0 for stack in unique_stacks}
-
-        #                 # Create columns for the stack inputs
-        #                 num_cols = 6 # Number of columns to display
-        #                 for i in range(0, len(unique_stacks), num_cols):
-        #                     cols = st.columns(num_cols)
-        #                     for j, stack in enumerate(unique_stacks[i:i+num_cols]):
-        #                         with cols[j]:
-        #                             # Create a unique key for each number input
-        #                             key = f"stack_count_{stack}"
-        #                             # Get the current count of this stack in the portfolio
-        #                             current_stack_count = len(st.session_state['working_frame'][st.session_state['working_frame']['Stack'] == stack])
-        #                             # Create number input with current value and max value based on actual count
-        #                             st.session_state.stack_multipliers[stack] = st.number_input(
-        #                                 f"{stack} count",
-        #                                 min_value=0.0,
-        #                                 max_value=float(current_stack_count),
-        #                                 value=0.0,
-        #                                 step=1.0,
-        #                                 key=key
-        #                             )
-
-        #                 portfolio_copy = st.session_state['working_frame'].copy()
-
-        #                 submitted = st.form_submit_button("Submit")
-        #                 if submitted:
-        #                     # Create a list to store selected rows
-        #                     selected_rows = []
-
-        #                     # For each stack, select the top N rows based on the count value
-        #                     for stack in unique_stacks:
-        #                         if stack in st.session_state.stack_multipliers:
-        #                             count = int(st.session_state.stack_multipliers[stack])
-        #                             # Get rows for this stack
-        #                             stack_rows = portfolio_copy[portfolio_copy['Stack'] == stack]
-        #                             # Sort by median and take top N rows
-        #                             top_rows = stack_rows.nlargest(count, sort_var)
-        #                             selected_rows.append(top_rows)
-
-        #                     # Combine all selected rows
-        #                     portfolio_concat = pd.concat(selected_rows)
-
-        #                     # Update export_file with filtered data
-        #                     st.session_state['export_file'] = portfolio_concat.copy()
-        #                     for col in st.session_state['export_file'].columns:
-        #                         if col not in excluded_cols:
-        #                             st.session_state['export_file'][col] = st.session_state['export_file'][col].map(st.session_state['export_dict'])
-        #                     st.write('Export portfolio updated!')
-        #     else:
-        #         st.session_state['export_file'] = st.session_state['working_frame'].copy()
-        #         if 'export_base' not in st.session_state:
-        #             st.session_state['export_base'] = pd.DataFrame(columns=st.session_state['working_frame'].columns)
-        #         for col in st.session_state['export_file'].columns:
-        #             if col not in excluded_cols:
-        #                 st.session_state['export_file'][col] = st.session_state['export_file'][col].map(st.session_state['export_dict'])
         if 'export_base' not in st.session_state:
            st.session_state['export_base'] = pd.DataFrame(columns=st.session_state['working_frame'].columns)
 
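For reference, a minimal sketch of how the new Presets form could be wired as a complete Streamlit snippet. Streamlit expects every st.form to contain an st.form_submit_button, and as committed the form body applies the preset on every rerun; the "Apply Preset" submit guard below is an illustrative assumption rather than part of this commit, while the widget labels, keys, and session_state names mirror the diff.

import streamlit as st

from global_func.small_field_preset import small_field_preset

# Assumes st.session_state['working_frame'] was populated earlier in app.py.
with st.expander('Presets'):
    with st.form(key='Small Field Preset'):
        # Target number of lineups to keep after applying the preset
        lineup_target = st.number_input("Lineup Target", value=150, min_value=1, step=1)
        # Hypothetical submit guard so the preset only runs when the user asks for it
        submitted = st.form_submit_button("Apply Preset")
        if submitted:
            parsed_frame = small_field_preset(st.session_state['working_frame'], lineup_target)
            st.session_state['working_frame'] = parsed_frame.sort_values(by='Own', ascending=False)
            st.session_state['export_merge'] = st.session_state['working_frame'].copy()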
global_func/small_field_preset.py
ADDED
@@ -0,0 +1,20 @@
+import pandas as pd
+
+def small_field_preset(portfolio: pd.DataFrame, lineup_target: int):
+
+    for slack_var in range(0, 1, .1):
+        working_portfolio = portfolio.sort_values(by='Own', ascending = False).reset_index(drop=True)
+        rows_to_drop = []
+        curr_own_type_max = working_portfolio.loc[0, 'Weighted Own'] + (slack_var * working_portfolio.loc[0, 'Weighted Own'])
+
+        for i in range(1, len(working_portfolio)):
+            if working_portfolio.loc[i, 'Weighted Own'] > curr_own_type_max:
+                rows_to_drop.append(i)
+            else:
+                curr_own_type_max = working_portfolio.loc[i, 'Weighted Own'] + (slack_var * working_portfolio.loc[i, 'Weighted Own'])
+
+        working_portfolio = working_portfolio.drop(rows_to_drop).reset_index(drop=True)
+        if len(working_portfolio) >= lineup_target:
+            return working_portfolio.head(lineup_target)
+
+    return working_portfolio
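A note on the loop above: range(0, 1, .1) raises a TypeError at call time because range() only accepts integer arguments. Below is a minimal, runnable sketch of the same slack-based thinning idea, stepping the slack fraction explicitly; the function name, docstring, and toy data are illustrative assumptions, while the 'Own' and 'Weighted Own' column names and the lineup_target cutoff come from the diff.

import pandas as pd

def small_field_preset_sketch(portfolio: pd.DataFrame, lineup_target: int) -> pd.DataFrame:
    """Drop lineups whose 'Weighted Own' exceeds a slack band around the last kept lineup,
    loosening the slack until at least lineup_target lineups survive."""
    # Try slack fractions 0.0, 0.1, ..., 0.9 (range() cannot step by 0.1 directly).
    for slack_var in (step / 10 for step in range(10)):
        working_portfolio = portfolio.sort_values(by='Own', ascending=False).reset_index(drop=True)
        rows_to_drop = []
        # The ceiling starts at the highest-owned lineup's weighted ownership plus slack.
        curr_own_max = working_portfolio.loc[0, 'Weighted Own'] * (1 + slack_var)

        for i in range(1, len(working_portfolio)):
            if working_portfolio.loc[i, 'Weighted Own'] > curr_own_max:
                # Exceeds the ceiling set by the most recently kept lineup, so drop it.
                rows_to_drop.append(i)
            else:
                # Keep it and reset the ceiling around this lineup's weighted ownership.
                curr_own_max = working_portfolio.loc[i, 'Weighted Own'] * (1 + slack_var)

        working_portfolio = working_portfolio.drop(rows_to_drop).reset_index(drop=True)
        if len(working_portfolio) >= lineup_target:
            return working_portfolio.head(lineup_target)

    # Even the loosest slack kept fewer than lineup_target lineups; return what survived.
    return working_portfolio

# Toy usage with illustrative data
if __name__ == "__main__":
    frame = pd.DataFrame({'Own': [120.0, 110.0, 95.0, 90.0],
                          'Weighted Own': [35.0, 20.0, 30.0, 18.0]})
    print(small_field_preset_sketch(frame, lineup_target=2))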