James McCool committed
Commit · 76d511e
1 Parent(s): d765ee8
Enhance metric calculations and player data handling in app.py
- Introduced logic to calculate 'stack' and 'stack_size' metrics for the 'Showdown' game type, improving data analysis capabilities.
- Added functionality to identify and count duplicate player combinations, enhancing data integrity.
- Updated player column creation to exclude specific columns, streamlining data processing and improving clarity.
app.py CHANGED
@@ -84,6 +84,7 @@ with tab2:
     if 'Contest' in st.session_state and 'projections_df' in st.session_state:
         col1, col2 = st.columns([1, 8])
         excluded_cols = ['BaseName', 'EntryCount']
+        player_columns = [col for col in st.session_state['Contest'].columns if col not in excluded_cols]
 
         # Create mapping dictionaries
         map_dict = {
@@ -131,7 +132,27 @@ with tab2:
             working_df['salary'] = working_df.apply(lambda row: sum(map_dict['salary_map'].get(player, 0) for player in row), axis=1)
             working_df['median'] = working_df.apply(lambda row: sum(map_dict['proj_map'].get(player, 0) for player in row), axis=1)
             working_df['Own'] = working_df.apply(lambda row: sum(map_dict['own_map'].get(player, 0) for player in row), axis=1)
+            working_df['sorted'] = working_df[player_columns].apply(
+                lambda row: ','.join(sorted(row.values)),
+                axis=1
+            )
+            working_df['dupes'] = working_df.groupby('sorted').transform('size')
+            working_df = working_df.drop('sorted', axis=1)
         elif type_var == 'Showdown':
+            working_df['stack'] = working_df.apply(
+                lambda row: Counter(
+                    map_dict['team_map'].get(player, '') for player in row
+                    if map_dict['team_map'].get(player, '') != ''
+                ).most_common(1)[0][0] if any(map_dict['team_map'].get(player, '') for player in row) else '',
+                axis=1
+            )
+            working_df['stack_size'] = working_df.apply(
+                lambda row: Counter(
+                    map_dict['team_map'].get(player, '') for player in row
+                    if map_dict['team_map'].get(player, '') != ''
+                ).most_common(1)[0][1] if any(map_dict['team_map'].get(player, '') for player in row) else '',
+                axis=1
+            )
             working_df['salary'] = working_df.apply(
                 lambda row: map_dict['cpt_salary_map'].get(row.iloc[0], 0) +
                             sum(map_dict['salary_map'].get(player, 0) for player in row.iloc[1:]),
@@ -147,6 +168,12 @@ with tab2:
                             sum(map_dict['own_map'].get(player, 0) for player in row.iloc[1:]),
                 axis=1
            )
+            working_df['sorted'] = working_df[player_columns].apply(
+                lambda row: row[0] + '|' + ','.join(sorted(row[1:].values)),
+                axis=1
+            )
+            working_df['dupes'] = working_df.groupby('sorted').transform('size')
+            working_df = working_df.drop('sorted', axis=1)
 
         # Initialize pagination in session state if not exists
         if 'current_page' not in st.session_state:
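
The 'stack' / 'stack_size' additions map each lineup slot to a team and keep whichever team appears most often, using Counter.most_common(1). A minimal sketch of that pattern, with a made-up team_map and player names rather than the app's map_dict:

from collections import Counter

# Hypothetical team lookup; in app.py this role is played by map_dict['team_map'].
team_map = {'QB A': 'KC', 'WR B': 'KC', 'WR C': 'BUF', 'TE D': 'KC'}

def stack_and_size(players):
    # Count only players that resolve to a team; unknown names are skipped.
    teams = Counter(team_map.get(p, '') for p in players if team_map.get(p, '') != '')
    if not teams:
        return '', ''
    return teams.most_common(1)[0]  # (most common team, how many lineup slots it fills)

print(stack_and_size(['QB A', 'WR B', 'WR C', 'TE D']))  # ('KC', 3)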
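The duplicate count collapses each lineup into an order-insensitive key and broadcasts the group size back onto every row; the Showdown variant keeps the first (captain) slot out of the sort by joining it with '|', so the same players with a different captain are not counted as duplicates. A rough equivalent of the Classic-branch logic on a toy frame (column names here are illustrative, not the app's):

import pandas as pd

df = pd.DataFrame({
    'P1': ['A', 'B', 'A'],
    'P2': ['B', 'A', 'B'],
})

# Sort the names so 'A,B' and 'B,A' count as the same lineup.
df['key'] = df[['P1', 'P2']].apply(lambda row: ','.join(sorted(row.values)), axis=1)
df['dupes'] = df.groupby('key')['key'].transform('size')
df = df.drop('key', axis=1)
print(df['dupes'].tolist())  # [3, 3, 3]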