James McCool committed on
Commit af96e17 · 1 Parent(s): 2a9bc74

initial commit after visual update

Files changed (4)
  1. .streamlit/secrets.toml +1 -0
  2. Dockerfile +13 -1
  3. requirements.txt +8 -3
  4. src/streamlit_app.py +452 -37
.streamlit/secrets.toml ADDED
@@ -0,0 +1 @@
+ mongo_uri = "mongodb+srv://multichem:[email protected]/?retryWrites=true&w=majority&appName=TestCluster"
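
For reference, the committed src/streamlit_app.py shown further below consumes this value with an environment-variable override; the following is a condensed sketch of its init_conn helper, not additional committed code:

import os
import pymongo
import streamlit as st

# MONGO_URI is set in the Dockerfile below; st.secrets reads this secrets.toml as a fallback
uri = os.getenv('MONGO_URI') or st.secrets['mongo_uri']
client = pymongo.MongoClient(uri, retryWrites=True, serverSelectionTimeoutMS=500000)
db = client["NFL_Database"]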
Dockerfile CHANGED
@@ -1,4 +1,4 @@
- FROM python:3.9-slim
+ FROM python:3.12-slim
 
  WORKDIR /app
 
@@ -11,6 +11,18 @@ RUN apt-get update && apt-get install -y \
 
  COPY requirements.txt ./
  COPY src/ ./src/
+ COPY .streamlit/ ./.streamlit/
+
+
+
+ ENV MONGO_URI="mongodb+srv://multichem:[email protected]/?retryWrites=true&w=majority&appName=TestCluster"
+ RUN useradd -m -u 1000 user
+ USER user
+ ENV HOME=/home/user\
+ PATH=/home/user/.local/bin:$PATH
+ WORKDIR $HOME/app
+ RUN pip install --no-cache-dir --upgrade pip
+ COPY --chown=user . $HOME/app
 
  RUN pip3 install -r requirements.txt
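
The image can be built and run locally along these lines; the tag nfl-stack-finder and the explicit streamlit run command are illustrative assumptions (the Dockerfile as committed defines no CMD or EXPOSE), and 8501 is Streamlit's default port:

docker build -t nfl-stack-finder .
docker run -p 8501:8501 nfl-stack-finder streamlit run src/streamlit_app.py --server.port 8501 --server.address 0.0.0.0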
requirements.txt CHANGED
@@ -1,3 +1,8 @@
- altair
- pandas
- streamlit
+ streamlit
+ openpyxl
+ matplotlib
+ pulp
+ docker
+ plotly
+ scipy
+ pymongo
src/streamlit_app.py CHANGED
@@ -1,40 +1,455 @@
- import altair as alt
  import numpy as np
  import pandas as pd
- import streamlit as st
 
- """
- # Welcome to Streamlit!
-
- Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
- forums](https://discuss.streamlit.io).
-
- In the meantime, below is an example of what you can do with just a few lines of code:
- """
-
- num_points = st.slider("Number of points in spiral", 1, 10000, 1100)
- num_turns = st.slider("Number of turns in spiral", 1, 300, 31)
-
- indices = np.linspace(0, 1, num_points)
- theta = 2 * np.pi * num_turns * indices
- radius = indices
-
- x = radius * np.cos(theta)
- y = radius * np.sin(theta)
-
- df = pd.DataFrame({
- "x": x,
- "y": y,
- "idx": indices,
- "rand": np.random.randn(num_points),
- })
-
- st.altair_chart(alt.Chart(df, height=700, width=700)
- .mark_point(filled=True)
- .encode(
- x=alt.X("x", axis=None),
- y=alt.Y("y", axis=None),
- color=alt.Color("idx", legend=None, scale=alt.Scale()),
- size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
- ))
+ import streamlit as st
  import numpy as np
  import pandas as pd
+ import pymongo
+ import re
+ import os
+ from itertools import combinations
+
+ st.set_page_config(layout="wide")
+
+ @st.cache_resource
+ def init_conn():
+ # Try to get from environment variable first, fall back to secrets
+ uri = os.getenv('MONGO_URI')
+ if not uri:
+ uri = st.secrets['mongo_uri']
+ client = pymongo.MongoClient(uri, retryWrites=True, serverSelectionTimeoutMS=500000)
+ db = client["NFL_Database"]
+
+ return db
+
+ db = init_conn()
+
+ game_format = {'Win Percentage': '{:.2%}','First Inning Lead Percentage': '{:.2%}',
+ 'Fifth Inning Lead Percentage': '{:.2%}', '8+ runs': '{:.2%}', 'DK LevX': '{:.2%}', 'FD LevX': '{:.2%}'}
+
+ team_roo_format = {'Top Score%': '{:.2%}','0 Runs': '{:.2%}', '1 Run': '{:.2%}', '2 Runs': '{:.2%}', '3 Runs': '{:.2%}', '4 Runs': '{:.2%}',
+ '5 Runs': '{:.2%}','6 Runs': '{:.2%}', '7 Runs': '{:.2%}', '8 Runs': '{:.2%}', '9 Runs': '{:.2%}', '10 Runs': '{:.2%}'}
+
+ wrong_acro = ['WSH', 'AZ', 'CHW']
+ right_acro = ['WAS', 'ARI', 'CWS']
+
+ st.markdown("""
+ <style>
+ /* Tab styling */
+ .stElementContainer [data-baseweb="button-group"] {
+ gap: 8px;
+ padding: 4px;
+ }
+ .stElementContainer [kind="segmented_control"] {
+ height: 45px;
+ white-space: pre-wrap;
+ background-color: #DAA520;
+ color: white;
+ border-radius: 10px;
+ gap: 1px;
+ padding: 10px 20px;
+ font-weight: bold;
+ transition: all 0.3s ease;
+ }
+ .stElementContainer [kind="segmented_controlActive"] {
+ height: 50px;
+ background-color: #DAA520;
+ border: 3px solid #FFD700;
+ color: white;
+ }
+ .stElementContainer [kind="segmented_control"]:hover {
+ background-color: #FFD700;
+ cursor: pointer;
+ }
+
+ div[data-baseweb="select"] > div {
+ background-color: #DAA520;
+ color: white;
+ }
+
+ </style>""", unsafe_allow_html=True)
+
+ @st.cache_resource(ttl=60)
+ def init_baselines():
+
+ collection = db["Player_Baselines"]
+ cursor = collection.find()
+
+ raw_display = pd.DataFrame(list(cursor))
+ raw_display = raw_display[['name', 'Team', 'Opp', 'Position', 'Salary', 'team_plays', 'team_pass', 'team_rush', 'team_tds', 'team_pass_tds', 'team_rush_tds', 'dropbacks', 'pass_yards', 'pass_tds',
+ 'rush_att', 'rush_yards', 'rush_tds', 'targets', 'rec', 'rec_yards', 'rec_tds', 'PPR', 'Half_PPR', 'Own']]
+ player_stats = raw_display[raw_display['Position'] != 'K']
+
+ collection = db["DK_NFL_ROO"]
+ cursor = collection.find()
+
+ raw_display = pd.DataFrame(list(cursor))
+ raw_display = raw_display.rename(columns={'player_ID': 'player_id'})
+ raw_display = raw_display[['Player', 'Position', 'Team', 'Opp', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '20+%', '2x%', '3x%', '4x%',
+ 'Own', 'Small_Field_Own', 'Large_Field_Own', 'Cash_Field_Own', 'CPT_Own', 'LevX', 'version', 'slate', 'timestamp', 'player_id', 'site']]
+ load_display = raw_display[raw_display['Position'] != 'K']
+ dk_roo_raw = load_display.dropna(subset=['Median'])
+
+ collection = db["FD_NFL_ROO"]
+ cursor = collection.find()
+
+ raw_display = pd.DataFrame(list(cursor))
+ raw_display = raw_display.rename(columns={'player_ID': 'player_id'})
+ raw_display = raw_display[['Player', 'Position', 'Team', 'Opp', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '20+%', '2x%', '3x%', '4x%',
+ 'Own', 'Small_Field_Own', 'Large_Field_Own', 'Cash_Field_Own', 'CPT_Own', 'LevX', 'version', 'slate', 'timestamp', 'player_id', 'site']]
+ load_display = raw_display[raw_display['Position'] != 'K']
+ fd_roo_raw = load_display.dropna(subset=['Median'])
+
+ collection = db["DK_DFS_Stacks"]
+ cursor = collection.find()
+
+ raw_display = pd.DataFrame(list(cursor))
+ raw_display = raw_display[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Total', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '60+%', '2x%', '3x%', '4x%', 'Own', 'LevX', 'slate', 'version']]
+ dk_stacks_raw = raw_display.copy()
+
+ collection = db["FD_DFS_Stacks"]
+ cursor = collection.find()
+
+ raw_display = pd.DataFrame(list(cursor))
+ raw_display = raw_display[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Total', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '60+%', '2x%', '3x%', '4x%', 'Own', 'LevX', 'slate', 'version']]
+ fd_stacks_raw = raw_display.copy()
+
+ return player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw
+
+ @st.cache_data
+ def convert_df_to_csv(df):
+ return df.to_csv().encode('utf-8')
+
+ player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = init_baselines()
+
+ app_load_reset_column, app_view_site_column = st.columns([1, 9])
+ with app_load_reset_column:
+ if st.button("Load/Reset Data", key='reset_data_button'):
+ st.cache_data.clear()
+ player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = init_baselines()
+ for key in st.session_state.keys():
+ del st.session_state[key]
+ with app_view_site_column:
+ with st.container():
+ app_view_column, app_site_column = st.columns([3, 3])
+ with app_view_column:
+ view_var = st.selectbox("Select view", ["Simple", "Advanced"], key='view_selectbox')
+ with app_site_column:
+ site_var = st.selectbox("What site do you want to view?", ('Draftkings', 'Fanduel'), key='site_selectbox')
+
+ selected_tab = st.segmented_control(
+ "Select Tab",
+ options=["Stack Finder", "User Upload"],
+ selection_mode='single',
+ default='Stack Finder',
+ width='stretch',
+ label_visibility='collapsed',
+ key='tab_selector'
+ )
+
+ if selected_tab == 'Stack Finder':
+ with st.expander("Stack Finder"):
+ app_info_column, slate_choice_column, filtering_column, stack_info_column = st.columns(4)
+ with app_info_column:
+ if st.button("Load/Reset Data", key='reset1'):
+ st.cache_data.clear()
+ player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = init_baselines()
+ for key in st.session_state.keys():
+ del st.session_state[key]
+ st.info(f"Last Update: " + str(st.session_state['handbuilder_data']['timestamp'][0]) + f" CST")
+ with slate_choice_column:
+ slate_var1 = st.radio("What slate are you working with?", ('Main Slate', 'Secondary Slate', 'Thurs-Mon Slate', 'User Upload'), key='slate_var1')
+ if slate_var1 == 'User Upload':
+ slate_var1 = st.session_state['proj_dataframe']
+ else:
+ if site_var == 'Draftkings':
+ raw_baselines = dk_roo_raw
+ if slate_var1 == 'Main Slate':
+ raw_baselines = raw_baselines[raw_baselines['Slate'] == 'main_slate']
+ elif slate_var1 == 'Secondary Slate':
+ raw_baselines = raw_baselines[raw_baselines['Slate'] == 'secondary_slate']
+ elif slate_var1 == 'Thurs-Mon Slate':
+ raw_baselines = raw_baselines[raw_baselines['Slate'] == 'thurs_mon_slate']
+ raw_baselines = raw_baselines.sort_values(by='Own', ascending=False)
+ qb_lookup = raw_baselines[raw_baselines['Position'] == 'QB']
+ elif site_var == 'Fanduel':
+ raw_baselines = fd_roo_raw
+ if slate_var1 == 'Main Slate':
+ raw_baselines = raw_baselines[raw_baselines['Slate'] == 'main_slate']
+ elif slate_var1 == 'Secondary Slate':
+ raw_baselines = raw_baselines[raw_baselines['Slate'] == 'secondary_slate']
+ elif slate_var1 == 'Thurs-Mon Slate':
+ raw_baselines = raw_baselines[raw_baselines['Slate'] == 'thurs_mon_slate']
+ raw_baselines = raw_baselines.sort_values(by='Own', ascending=False)
+ qb_lookup = raw_baselines[raw_baselines['Position'] == 'QB']
+ with filtering_column:
+ split_var2 = st.radio("Would you like to run stack analysis for the full slate or individual teams?", ('Full Slate Run', 'Specific Teams'), key='split_var2')
+ if split_var2 == 'Specific Teams':
+ team_var2 = st.multiselect('Which teams would you like to include in the analysis?', options = raw_baselines['Team'].unique(), key='team_var2')
+ elif split_var2 == 'Full Slate Run':
+ team_var2 = raw_baselines.Team.unique().tolist()
+ pos_split2 = st.radio("Are you viewing all positions, specific groups, or specific positions?", ('All Positions', 'Specific Positions'), key='pos_split2')
+ if pos_split2 == 'Specific Positions':
+ pos_var2 = st.multiselect('What Positions would you like to view?', options = ['WR', 'TE', 'RB'])
+ elif pos_split2 == 'All Positions':
+ pos_var2 = 'All'
+ with stack_info_column:
+ if site_var == 'Draftkings':
+ max_sal2 = st.number_input('Max Salary', min_value = 5000, max_value = 50000, value = 35000, step = 100, key='max_sal2')
+ elif site_var == 'Fanduel':
+ max_sal2 = st.number_input('Max Salary', min_value = 5000, max_value = 35000, value = 25000, step = 100, key='max_sal2')
+ size_var2 = st.selectbox('What size of stacks are you analyzing?', options = ['3-man', '4-man', '5-man'])
+ if size_var2 == '3-man':
+ stack_size = 3
+ if size_var2 == '4-man':
+ stack_size = 4
+ if size_var2 == '5-man':
+ stack_size = 5
+
+ team_dict = dict(zip(raw_baselines.Player, raw_baselines.Team))
+ proj_dict = dict(zip(raw_baselines.Player, raw_baselines.Median))
+ own_dict = dict(zip(raw_baselines.Player, raw_baselines.Own))
+ cost_dict = dict(zip(raw_baselines.Player, raw_baselines.Salary))
+ qb_dict = dict(zip(qb_lookup.Team, qb_lookup.Player))
+
+ if site_var == 'Draftkings':
+ position_limits = {
+ 'QB': 1,
+ 'RB': 2,
+ 'WR': 3,
+ 'TE': 1,
+ 'UTIL': 1,
+ 'DST': 1,
+ # Add more as needed
+ }
+ max_salary = max_sal2
+ max_players = 9
+ else:
+ position_limits = {
+ 'QB': 1,
+ 'RB': 2,
+ 'WR': 3,
+ 'TE': 1,
+ 'UTIL': 1,
+ 'DST': 1,
+ # Add more as needed
+ }
+ max_salary = max_sal2
+ max_players = 9
+
+ stack_hold_container = st.empty()
+ comb_list = []
+ if pos_split2 == 'All Positions':
+ raw_baselines = raw_baselines
+ elif pos_split2 != 'All Positions':
+ raw_baselines = raw_baselines[raw_baselines['Position'].str.contains('|'.join(pos_var2))]
+
+ # Create a position dictionary mapping players to their eligible positions
+ pos_dict = dict(zip(raw_baselines.Player, raw_baselines.Position))
+
+ def is_valid_combination(combo):
+ # Count positions in this combination
+ position_counts = {pos: 0 for pos in position_limits.keys()}
+
+ # For each player in the combination
+ for player in combo:
+ # Get their eligible positions
+ player_positions = pos_dict[player].split('/')
+
+ # For each position they can play
+ for pos in player_positions:
+ if pos == 'UTIL':
+ # UTIL can be filled by any position
+ for p in position_counts:
+ position_counts[p] += 1
+
+ # Check if any position exceeds its limit
+ for pos, limit in position_limits.items():
+ if position_counts[pos] > limit:
+ return False
+
+ return True
+
+ # Modify the combination generation code
+ comb_list = []
+ for cur_team in team_var2:
+ working_baselines = raw_baselines
+ working_baselines = working_baselines[working_baselines['Team'] == cur_team]
+ working_baselines = working_baselines[working_baselines['Position'] != 'DST']
+ working_baselines = working_baselines[working_baselines['Position'] != 'K']
+ qb_var = qb_dict[cur_team]
+ order_list = working_baselines['Player']
+
+ comb = combinations(order_list, stack_size)
+
+ for i in list(comb):
+ if qb_var in i:
+ comb_list.append(i)
+
+ # Only add combinations that satisfy position limits
+ for i in list(comb):
+ if is_valid_combination(i):
+ comb_list.append(i)
+
+ comb_DF = pd.DataFrame(comb_list)
+
+ if stack_size == 3:
+ comb_DF['Team'] = comb_DF[0].map(team_dict)
+
+ comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
+ comb_DF[1].map(proj_dict),
+ comb_DF[2].map(proj_dict)])
+
+ comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
+ comb_DF[1].map(cost_dict),
+ comb_DF[2].map(cost_dict)])
+
+ comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
+ comb_DF[1].map(own_dict),
+ comb_DF[2].map(own_dict)])
+ elif stack_size == 4:
+ comb_DF['Team'] = comb_DF[0].map(team_dict)
+
+ comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
+ comb_DF[1].map(proj_dict),
+ comb_DF[2].map(proj_dict),
+ comb_DF[3].map(proj_dict)])
+
+ comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
+ comb_DF[1].map(cost_dict),
+ comb_DF[2].map(cost_dict),
+ comb_DF[3].map(cost_dict)])
+
+ comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
+ comb_DF[1].map(own_dict),
+ comb_DF[2].map(own_dict),
+ comb_DF[3].map(own_dict)])
+ elif stack_size == 5:
+ comb_DF['Team'] = comb_DF[0].map(team_dict)
+
+ comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
+ comb_DF[1].map(proj_dict),
+ comb_DF[2].map(proj_dict),
+ comb_DF[3].map(proj_dict),
+ comb_DF[4].map(proj_dict)])
+
+ comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
+ comb_DF[1].map(cost_dict),
+ comb_DF[2].map(cost_dict),
+ comb_DF[3].map(cost_dict),
+ comb_DF[4].map(cost_dict)])
+
+ comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
+ comb_DF[1].map(own_dict),
+ comb_DF[2].map(own_dict),
+ comb_DF[3].map(own_dict),
+ comb_DF[4].map(own_dict)])
+
+ comb_DF = comb_DF.sort_values(by='Proj', ascending=False)
+ comb_DF = comb_DF.loc[comb_DF['Salary'] <= max_sal2]
+
+ cut_var = 0
+
+ if stack_size == 2:
+ while cut_var <= int(len(comb_DF)):
+ try:
+ if int(cut_var) == 0:
+ cur_proj = float(comb_DF.iat[cut_var, 3])
+ cur_own = float(comb_DF.iat[cut_var, 5])
+ elif int(cut_var) >= 1:
+ check_own = float(comb_DF.iat[cut_var, 5])
+ if check_own > cur_own:
+ comb_DF = comb_DF.drop([cut_var])
+ cur_own = cur_own
+ cut_var = cut_var - 1
+ comb_DF = comb_DF.reset_index()
+ comb_DF = comb_DF.drop(['index'], axis=1)
+ elif check_own <= cur_own:
+ cur_own = float(comb_DF.iat[cut_var, 5])
+ cut_var = cut_var
+ cut_var += 1
+ except:
+ cut_var += 1
+ elif stack_size == 3:
+ while cut_var <= int(len(comb_DF)):
+ try:
+ if int(cut_var) == 0:
+ cur_proj = float(comb_DF.iat[cut_var,4])
+ cur_own = float(comb_DF.iat[cut_var,6])
+ elif int(cut_var) >= 1:
+ check_own = float(comb_DF.iat[cut_var,6])
+ if check_own > cur_own:
+ comb_DF = comb_DF.drop([cut_var])
+ cur_own = cur_own
+ cut_var = cut_var - 1
+ comb_DF = comb_DF.reset_index()
+ comb_DF = comb_DF.drop(['index'], axis=1)
+ elif check_own <= cur_own:
+ cur_own = float(comb_DF.iat[cut_var,6])
+ cut_var = cut_var
+ cut_var += 1
+ except:
+ cut_var += 1
+ elif stack_size == 4:
+ while cut_var <= int(len(comb_DF)):
+ try:
+ if int(cut_var) == 0:
+ cur_proj = float(comb_DF.iat[cut_var,5])
+ cur_own = float(comb_DF.iat[cut_var,7])
+ elif int(cut_var) >= 1:
+ check_own = float(comb_DF.iat[cut_var,7])
+ if check_own > cur_own:
+ comb_DF = comb_DF.drop([cut_var])
+ cur_own = cur_own
+ cut_var = cut_var - 1
+ comb_DF = comb_DF.reset_index()
+ comb_DF = comb_DF.drop(['index'], axis=1)
+ elif check_own <= cur_own:
+ cur_own = float(comb_DF.iat[cut_var,7])
+ cut_var = cut_var
+ cut_var += 1
+ except:
+ cut_var += 1
+ elif stack_size == 5:
+ while cut_var <= int(len(comb_DF)):
+ try:
+ if int(cut_var) == 0:
+ cur_proj = float(comb_DF.iat[cut_var,6])
+ cur_own = float(comb_DF.iat[cut_var,8])
+ elif int(cut_var) >= 1:
+ check_own = float(comb_DF.iat[cut_var,8])
+ if check_own > cur_own:
+ comb_DF = comb_DF.drop([cut_var])
+ cur_own = cur_own
+ cut_var = cut_var - 1
+ comb_DF = comb_DF.reset_index()
+ comb_DF = comb_DF.drop(['index'], axis=1)
+ elif check_own <= cur_own:
+ cur_own = float(comb_DF.iat[cut_var,8])
+ cut_var = cut_var
+ cut_var += 1
+ except:
+ cut_var += 1
+
+ with stack_hold_container:
+ stack_hold_container = st.empty()
+ st.dataframe(comb_DF.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width = True)
+ st.download_button(
+ label="Export Tables",
+ data=convert_df_to_csv(comb_DF),
+ file_name='NFL_Stack_Options_export.csv',
+ mime='text/csv',
+ )
+
+ if selected_tab == 'User Upload':
+ st.info("The Projections file can have any columns in any order, but must contain columns explicitly named: 'Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', and 'Own'.")
+ col1, col2 = st.columns([1, 5])
 
+ with col1:
+ proj_file = st.file_uploader("Upload Projections File", key = 'proj_uploader')
+
+ if proj_file is not None:
+ try:
+ st.session_state['proj_dataframe'] = pd.read_csv(proj_file)
+ except:
+ st.session_state['proj_dataframe'] = pd.read_excel(proj_file)
+ with col2:
+ if proj_file is not None:
+ st.dataframe(st.session_state['proj_dataframe'].style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width = True)
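
For the User Upload tab above, the projections file only needs the columns named in the st.info message; a minimal illustrative CSV (the player names and values below are made up) would look like:

Player,Salary,Position,Team,Opp,Median,Own
Example QB,7500,QB,BUF,NYJ,22.5,18.0
Example WR,6800,WR,BUF,NYJ,16.2,12.5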