Multichem committed on
Commit
7815102
·
verified ·
1 Parent(s): a6fd2c8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +275 -2
app.py CHANGED
@@ -1,8 +1,36 @@
1
  import streamlit as st
2
  import requests
 
 
 
 
 
 
 
 
 
 
3
 
4
  st.set_page_config(layout="wide")
5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
  purge_cache = "https://sheetdb.io/api/v1/svino07zkd6j6/cache/purge/f8fc41b2"
7
 
8
  traderater = "https://www.fantasylife.com/api/projections/v1/nfl/ratemytrade/season/update"
@@ -24,11 +52,14 @@ dev_agg_url = "https://fantasylife.dev.spotlightsportsb2b.com/api/projections/v1
24
  freedman_nfl_game_model = "https://www.fantasylife.com/api/projections/v1/nfl-odds/james/game/update"
25
  thor_ncaaf_game_model = "https://www.fantasylife.com/api/projections/v1/ncaafb-odds/james/game/update"
26
 
 
 
 
27
  headers = {
28
  'Authorization': 'Bearer 6984da1f-2c81-4140-8206-d018af38533f',
29
  }
30
 
31
- tab1, tab2, tab3, tab4, tab5, tab6 = st.tabs(['Season Long (Live Site)', 'Season Long (Dev Site)', 'Weekly', 'Game Model', 'Trade Rater', 'Rest of Season'])
32
 
33
  with tab1:
34
  with st.container():
@@ -144,4 +175,246 @@ with tab6:
144
  if st.button("Rest of Season Update", key='reset13'):
145
  response = requests.post(ros_james_url, headers=headers)
146
  if response.status_code == 200:
147
- st.write("Uploading!")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import streamlit as st
2
  import requests
3
+ import requests
4
+ import pandas as pd
5
+ from pandas import DataFrame
6
+ import numpy as np
7
+ import gspread
8
+ import pytz
9
+ from datetime import datetime
10
+ from datetime import date, timedelta
11
+ import time
12
+ from discordwebhook import Discord
13
 
14
  st.set_page_config(layout="wide")
15
 
16
+ scope = ['https://www.googleapis.com/auth/spreadsheets',
17
+ "https://www.googleapis.com/auth/drive"]
18
+
19
+ credentials = {
20
+ "type": "service_account",
21
+ "project_id": "sheets-api-connect-378620",
22
+ "private_key_id": "1005124050c80d085e2c5b344345715978dd9cc9",
23
+ "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCtKa01beXwc88R\nnPZVQTNPVQuBnbwoOfc66gW3547ja/UEyIGAF112dt/VqHprRafkKGmlg55jqJNt\na4zceLKV+wTm7vBu7lDISTJfGzCf2TrxQYNqwMKE2LOjI69dBM8u4Dcb4k0wcp9v\ntW1ZzLVVuwTvmrg7JBHjiSaB+x5wxm/r3FOiJDXdlAgFlytzqgcyeZMJVKKBQHyJ\njEGg/1720A0numuOCt71w/2G0bDmijuj1e6tH32MwRWcvRNZ19K9ssyDz2S9p68s\nYDhIxX69OWxwScTIHLY6J2t8txf/XMivL/636fPlDADvBEVTdlT606n8CcKUVQeq\npUVdG+lfAgMBAAECggEAP38SUA7B69eTfRpo658ycOs3Amr0JW4H/bb1rNeAul0K\nZhwd/HnU4E07y81xQmey5kN5ZeNrD5EvqkZvSyMJHV0EEahZStwhjCfnDB/cxyix\nZ+kFhv4y9eK+kFpUAhBy5nX6T0O+2T6WvzAwbmbVsZ+X8kJyPuF9m8ldcPlD0sce\ntj8NwVq1ys52eosqs7zi2vjt+eMcaY393l4ls+vNq8Yf27cfyFw45W45CH/97/Nu\n5AmuzlCOAfFF+z4OC5g4rei4E/Qgpxa7/uom+BVfv9G0DIGW/tU6Sne0+37uoGKt\nW6DzhgtebUtoYkG7ZJ05BTXGp2lwgVcNRoPwnKJDxQKBgQDT5wYPUBDW+FHbvZSp\nd1m1UQuXyerqOTA9smFaM8sr/UraeH85DJPEIEk8qsntMBVMhvD3Pw8uIUeFNMYj\naLmZFObsL+WctepXrVo5NB6RtLB/jZYxiKMatMLUJIYtcKIp+2z/YtKiWcLnwotB\nWdCjVnPTxpkurmF2fWP/eewZ+wKBgQDRMtJg7etjvKyjYNQ5fARnCc+XsI3gkBe1\nX9oeXfhyfZFeBXWnZzN1ITgFHplDznmBdxAyYGiQdbbkdKQSghviUQ0igBvoDMYy\n1rWcy+a17Mj98uyNEfmb3X2cC6WpvOZaGHwg9+GY67BThwI3FqHIbyk6Ko09WlTX\nQpRQjMzU7QKBgAfi1iflu+q0LR+3a3vvFCiaToskmZiD7latd9AKk2ocsBd3Woy9\n+hXXecJHPOKV4oUJlJgvAZqe5HGBqEoTEK0wyPNLSQlO/9ypd+0fEnArwFHO7CMF\nycQprAKHJXM1eOOFFuZeQCaInqdPZy1UcV5Szla4UmUZWkk1m24blHzXAoGBAMcA\nyH4qdbxX9AYrC1dvsSRvgcnzytMvX05LU0uF6tzGtG0zVlub4ahvpEHCfNuy44UT\nxRWW/oFFaWjjyFxO5sWggpUqNuHEnRopg3QXx22SRRTGbN45li/+QAocTkgsiRh1\nqEcYZsO4mPCsQqAy6E2p6RcK+Xa+omxvSnVhq0x1AoGAKr8GdkCl4CF6rieLMAQ7\nLNBuuoYGaHoh8l5E2uOQpzwxVy/nMBcAv+2+KqHEzHryUv1owOi6pMLv7A9mTFoS\n18B0QRLuz5fSOsVnmldfC9fpUc6H8cH1SINZpzajqQA74bPwELJjnzrCnH79TnHG\nJuElxA33rFEjbgbzdyrE768=\n-----END PRIVATE KEY-----\n",
24
+ "client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
25
+ "client_id": "106625872877651920064",
26
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
27
+ "token_uri": "https://oauth2.googleapis.com/token",
28
+ "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
29
+ "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
30
+ }
31
+
32
+ gc = gspread.service_account_from_dict(credentials)
33
+
34
  purge_cache = "https://sheetdb.io/api/v1/svino07zkd6j6/cache/purge/f8fc41b2"
35
 
36
  traderater = "https://www.fantasylife.com/api/projections/v1/nfl/ratemytrade/season/update"
 
52
  freedman_nfl_game_model = "https://www.fantasylife.com/api/projections/v1/nfl-odds/james/game/update"
53
  thor_ncaaf_game_model = "https://www.fantasylife.com/api/projections/v1/ncaafb-odds/james/game/update"
54
 
55
+ NCAAF_model_url = 'https://docs.google.com/spreadsheets/d/17QUsCEMVAFbOteenUbi18H2kgVYx4cwBGs9dFCi4ri4/edit?pli=1&gid=1637459210#gid=1637459210'
56
+ pff_url = 'https://www.pff.com/api/scoreboard/schedule?league=ncaa&season=2024'
57
+
58
  headers = {
59
  'Authorization': 'Bearer 6984da1f-2c81-4140-8206-d018af38533f',
60
  }
61
 
62
+ tab1, tab2, tab3, tab4, tab5, tab6, tab7 = st.tabs(['Season Long (Live Site)', 'Season Long (Dev Site)', 'Weekly', 'Game Model', 'Trade Rater', 'Rest of Season', 'NCAAF Script'])
63
 
64
  with tab1:
65
  with st.container():
 
175
  if st.button("Rest of Season Update", key='reset13'):
176
  response = requests.post(ros_james_url, headers=headers)
177
  if response.status_code == 200:
178
+ st.write("Uploading!")
179
+
180
+ with tab7:
181
+ with st.container():
182
+ col1, col2, col3 = st.columns([3, 3, 3])
183
+
184
+ with col1:
185
+ st.info("Update NCAAF schedule and ranks")
186
+ if st.button("Update NCAAF", key='reset14'):
187
+ sh = gc.open_by_url(NCAAF_model_url)
188
+ worksheet = sh.worksheet('ATLranks')
189
+ ranks_df = DataFrame(worksheet.get_all_records())
190
+ ranks_dict = dict(zip(ranks_df.Team, ranks_df.ATL))
191
+ conf_dict = dict(zip(ranks_df.Team, ranks_df.Conference))
192
+
193
+ time.sleep(.5)
194
+
195
+ worksheet = sh.worksheet('HFA')
196
+ hfa_df = DataFrame(worksheet.get_all_records())
197
+ hfa_dict = dict(zip(hfa_df.Team, hfa_df.HFA))
198
+
199
+ time.sleep(.5)
200
+
201
+ worksheet = sh.worksheet('Odds')
202
+ odds_df = DataFrame(worksheet.get_all_records())
203
+ odds_dict = dict(zip(odds_df.Point_Spread, odds_df.Favorite_Win_Chance))
204
+
205
+ time.sleep(.5)
206
+
207
+ worksheet = sh.worksheet('Acronyms')
208
+ acros_df = DataFrame(worksheet.get_all_records())
209
+ right_acro = acros_df['Team'].tolist()
210
+ wrong_acro = acros_df['Acro'].tolist()
211
+
212
+ time.sleep(.5)
213
+
214
+ worksheet = sh.worksheet('Add games')
215
+ add_games_df = DataFrame(worksheet.get_all_records())
216
+ add_games_df.replace('', np.nan, inplace=True)
217
+
218
+ time.sleep(.5)
219
+
220
+ worksheet = sh.worksheet('Completed games')
221
+ comp_games_df = DataFrame(worksheet.get_all_records())
222
+ comp_games_df.replace('', np.nan, inplace=True)
223
+
224
+ time.sleep(.5)
225
+
226
+ worksheet = sh.worksheet('LY_scoring')
227
+ lyscore_df = DataFrame(worksheet.get_all_records())
228
+ for checkVar in range(len(wrong_acro)):
229
+ lyscore_df['Team'] = lyscore_df['Team'].replace(wrong_acro, right_acro)
230
+
231
+ PFA_dict = dict(zip(lyscore_df.Team, lyscore_df.PF_G_adj))
232
+ PAA_dict = dict(zip(lyscore_df.Team, lyscore_df.PA_G_adj))
233
+
234
+ # Send a GET request to the API
235
+ response = requests.get(pff_url)
236
+
237
+ # Check if the request was successful
238
+ if response.status_code == 200:
239
+ # Parse the JSON content
240
+ data = response.json()
241
+
242
+ # Extract the "weeks" object
243
+ weeks = data.get('weeks', [])
244
+
245
+ # Initialize an empty list to store game data
246
+ games_list = []
247
+ team_list = []
248
+
249
+ # Iterate over each week and its games
250
+ for week in weeks:
251
+ week_number = week.get('week')
252
+ for game in week.get('games', []):
253
+ # Add week number to the game dictionary
254
+ game['week'] = week_number
255
+ away_franchise = game.get('away_franchise', {})
256
+ away_franchise_groups = away_franchise.get('groups', {})
257
+ away_conf = away_franchise_groups[0]['name']
258
+ home_franchise = game.get('home_franchise', {})
259
+ home_franchise_groups = home_franchise.get('groups', {})
260
+ home_conf = home_franchise_groups[0]['name']
261
+
262
+ # Flatten the away and home franchise data
263
+ game_data = {
264
+ 'game_id': game.get('external_game_id'),
265
+ 'Day': game.get('kickoff_date'),
266
+ 'CST': game.get('kickoff_raw'),
267
+ 'away_id': away_franchise.get('abbreviation'),
268
+ 'Away': away_franchise.get('city'),
269
+ 'home_id': home_franchise.get('abbreviation'),
270
+ 'Home': home_franchise.get('city')
271
+ }
272
+
273
+ home_data = {
274
+ 'team': home_franchise.get('city'),
275
+ 'conf': home_conf
276
+ }
277
+
278
+ away_data = {
279
+ 'team': away_franchise.get('city'),
280
+ 'conf': away_conf
281
+ }
282
+
283
+ merged_data = game_data | game
284
+ team_data = home_data | away_data
285
+ games_list.append(merged_data)
286
+ team_list.append(home_data)
287
+ team_list.append(away_data)
288
+
289
+ # Create a DataFrame from the games list
290
+ df = pd.DataFrame(games_list)
291
+ team_df = pd.DataFrame(team_list)
292
+ team_df = team_df.drop_duplicates(subset=['team', 'conf'])
293
+
294
+ # Display the DataFrame
295
+ print(df)
296
+ else:
297
+ print(f"Failed to retrieve data. HTTP Status code: {response.status_code}")
298
+
299
+ df_raw = df[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'point_spread', 'over_under', 'Day', 'CST']]
300
+ df_raw['conf_game'] = np.nan
301
+ df_raw['Away_ATL'] = np.nan
302
+ df_raw['Home_ATL'] = np.nan
303
+ df_raw['Home Spread'] = np.nan
304
+ df_raw['Proj Total'] = np.nan
305
+ df_raw['Neutral'] = np.nan
306
+ df_raw['Notes'] = np.nan
307
+ df_raw['over_under'].fillna("", inplace=True)
308
+ df_raw['over_under'] = pd.to_numeric(df_raw['over_under'], errors='coerce')
309
+ df_raw = df_raw[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'conf_game', 'Away_ATL', 'Home_ATL', 'point_spread', 'Home Spread', 'over_under', 'Proj Total', 'Day', 'CST', 'Neutral', 'Notes']]
310
+ add_games_merge = add_games_df
311
+ comp_games_merge = comp_games_df
312
+ conf_adj = dict(zip(add_games_merge['game_id'], add_games_merge['conf_game']))
313
+ df_merge_1 = pd.concat([add_games_merge, df_raw])
314
+ df_cleaned = pd.concat([comp_games_merge, df_merge_1])
315
+ df_cleaned = df_cleaned[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'point_spread', 'over_under', 'Day', 'CST']]
316
+ df_cleaned = df_cleaned.drop_duplicates(subset=['game_id'])
317
+
318
+ def cond_away_PFA(row, df):
319
+ mask = (df['Away_ATL'] >= row['Away_ATL'] - 5) & (df['Away_ATL'] <= row['Away_ATL'] + 5)
320
+ return df.loc[mask, 'Away_PFA'].mean()
321
+
322
+ def cond_home_PFA(row, df):
323
+ mask = (df['Home_ATL'] >= row['Home_ATL'] - 5) & (df['Home_ATL'] <= row['Home_ATL'] + 5)
324
+ return df.loc[mask, 'Home_PFA'].mean()
325
+
326
+ def cond_away_PAA(row, df):
327
+ mask = (df['Away_ATL'] >= row['Away_ATL'] - 5) & (df['Away_ATL'] <= row['Away_ATL'] + 5)
328
+ return df.loc[mask, 'Away_PAA'].mean()
329
+
330
+ def cond_home_PAA(row, df):
331
+ mask = (df['Home_ATL'] >= row['Home_ATL'] - 5) & (df['Home_ATL'] <= row['Home_ATL'] + 5)
332
+ return df.loc[mask, 'Home_PAA'].mean()
333
+
334
+ for checkVar in range(len(wrong_acro)):
335
+ df_cleaned['Away'] = df_cleaned['Away'].replace(wrong_acro, right_acro)
336
+ df_cleaned['Home'] = df_cleaned['Home'].replace(wrong_acro, right_acro)
337
+ df_cleaned['Away_conf'] = df_cleaned['Away'].map(conf_dict)
338
+ df_cleaned['Home_conf'] = df_cleaned['Home'].map(conf_dict)
339
+ df_cleaned['conf_game_var'] = np.where((df_cleaned['Away_conf'] == df_cleaned['Home_conf']), 1, 0)
340
+ df_cleaned['conf_game'] = df_cleaned.apply(lambda row: conf_adj.get(row['game_id'], row['conf_game_var']), axis=1)
341
+ df_cleaned['Away_ATL'] = df_cleaned['Away'].map(ranks_dict)
342
+ df_cleaned['Home_ATL'] = df_cleaned['Home'].map(ranks_dict)
343
+ df_cleaned['Away_PFA'] = df_cleaned['Away'].map(PFA_dict)
344
+ df_cleaned['Home_PFA'] = df_cleaned['Home'].map(PFA_dict)
345
+ df_cleaned['Away_PAA'] = df_cleaned['Away'].map(PAA_dict)
346
+ df_cleaned['Home_PAA'] = df_cleaned['Home'].map(PAA_dict)
347
+
348
+ # Apply the function to each row in the DataFrame
349
+ df_cleaned['cond_away_PFA'] = df_cleaned.apply(lambda row: cond_away_PFA(row, df_cleaned), axis=1)
350
+ df_cleaned['cond_home_PFA'] = df_cleaned.apply(lambda row: cond_home_PFA(row, df_cleaned), axis=1)
351
+ df_cleaned['cond_away_PAA'] = df_cleaned.apply(lambda row: cond_away_PAA(row, df_cleaned), axis=1)
352
+ df_cleaned['cond_home_PAA'] = df_cleaned.apply(lambda row: cond_home_PAA(row, df_cleaned), axis=1)
353
+
354
+ df_cleaned['cond_away_PFA'] = np.where((df_cleaned['Away_ATL'] <= 0), 18, df_cleaned['cond_away_PFA'])
355
+ df_cleaned['cond_away_PAA'] = np.where((df_cleaned['Away_ATL'] <= 0), 36, df_cleaned['cond_away_PAA'])
356
+ df_cleaned['cond_home_PFA'] = np.where((df_cleaned['Home_ATL'] <= 0), 18, df_cleaned['cond_home_PFA'])
357
+ df_cleaned['cond_home_PAA'] = np.where((df_cleaned['Home_ATL'] <= 0), 36, df_cleaned['cond_home_PAA'])
358
+
359
+ df_cleaned['Away_PFA'] = df_cleaned['Away_PFA'].fillna(df_cleaned['cond_away_PFA'])
360
+ df_cleaned['Away_PAA'] = df_cleaned['Away_PAA'].fillna(df_cleaned['cond_away_PAA'])
361
+ df_cleaned['Home_PFA'] = df_cleaned['Home_PFA'].fillna(df_cleaned['cond_home_PFA'])
362
+ df_cleaned['Home_PAA'] = df_cleaned['Home_PAA'].fillna(df_cleaned['cond_home_PAA'])
363
+
364
+ df_cleaned['Away_PFA_adj'] = (df_cleaned['Away_PFA'] * .75 + df_cleaned['Home_PAA'] * .25)
365
+ df_cleaned['Home_PFA_adj'] = (df_cleaned['Home_PFA'] * .75 + df_cleaned['Away_PAA'] * .25)
366
+ df_cleaned['Away_PFA_cond'] = (df_cleaned['cond_away_PFA'] * .75 + df_cleaned['cond_home_PAA'] * .25)
367
+ df_cleaned['Home_PFA_cond'] = (df_cleaned['cond_home_PFA'] * .75 + df_cleaned['cond_away_PAA'] * .25)
368
+
369
+ df_cleaned['HFA'] = df_cleaned['Home'].map(hfa_dict)
370
+ df_cleaned['Neutral'] = np.nan
371
+ df_cleaned['Home Spread'] = ((df_cleaned['Home_ATL'] - df_cleaned['Away_ATL']) + df_cleaned['HFA']) * -1
372
+ df_cleaned['Win Prob'] = df_cleaned['Home Spread'].map(odds_dict)
373
+ df_cleaned['Spread Adj'] = np.nan
374
+ df_cleaned['Final Spread'] = np.nan
375
+ df_cleaned['Proj Total'] = df_cleaned['Away_PFA_adj'] + df_cleaned['Home_PFA_adj']
376
+ df_cleaned['Proj Total (adj)'] = np.where(df_cleaned['over_under'] != np.nan, (df_cleaned['over_under'] * .66 + df_cleaned['Proj Total'] * .34), df_cleaned['Proj Total'])
377
+ df_cleaned['Proj Total (adj)'] = df_cleaned['Proj Total (adj)'].fillna(df_cleaned['Proj Total'])
378
+ df_cleaned['Total Adj'] = np.nan
379
+ df_cleaned['Final Total'] = np.nan
380
+ df_cleaned['Notes'] = np.nan
381
+
382
+ export_df_1 = df_cleaned[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'conf_game', 'Away_ATL', 'Home_ATL', 'point_spread', 'Home Spread',
383
+ 'over_under', 'Proj Total (adj)', 'Day', 'CST', 'Neutral', 'Notes']]
384
+
385
+
386
+ export_df_1.rename(columns={"pff_week": "week", "point_spread": "Vegas Spread", "over_under": "Vegas Total", "Proj Total (adj)": "Proj Total"}, inplace = True)
387
+ export_df_2 = add_games_df
388
+ export_df = export_df_1
389
+ export_df['week'] = pd.to_numeric(export_df['week'], errors='coerce')
390
+ export_df = export_df.drop_duplicates(subset=['week', 'Away', 'Home'])
391
+ export_df = export_df.sort_values(by='week', ascending=True)
392
+
393
+ export_df['Vegas Spread'] = pd.to_numeric(export_df['Vegas Spread'], errors='coerce')
394
+ export_df['Vegas Total'] = pd.to_numeric(export_df['Vegas Total'], errors='coerce')
395
+ export_df['Proj Total'] = pd.to_numeric(export_df['Proj Total'], errors='coerce')
396
+ export_df['Home Spread'] = pd.to_numeric(export_df['Home Spread'], errors='coerce')
397
+ export_df.replace([np.nan, np.inf, -np.inf], '', inplace=True)
398
+ export_df = export_df.drop_duplicates(subset=['week', 'away_id', 'home_id'])
399
+
400
+ sh = gc.open_by_url(NCAAF_model_url)
401
+ worksheet = sh.worksheet('Master_sched')
402
+ worksheet.batch_clear(['A:P'])
403
+ worksheet.update([export_df.columns.values.tolist()] + export_df.values.tolist())
404
+
405
+ sheet_list = ['W0', 'W1', 'W2', 'W3', 'W4', 'W5', 'W6', 'W7', 'W8', 'W9', 'W10', 'W11', 'W12', 'W13', 'W14']
406
+ # sheet_list = ['W0']
407
+ counter = 0
408
+
409
+ for sheet_name in sheet_list:
410
+ export_cull = export_df[export_df['week'] == str(counter)]
411
+ sh = gc.open_by_url(NCAAF_model_url)
412
+ worksheet = sh.worksheet(sheet_name)
413
+ worksheet.batch_clear(['A:P'])
414
+ worksheet.update([export_cull.columns.values.tolist()] + export_cull.values.tolist())
415
+
416
+ counter += 1
417
+
418
+ time.sleep(3.76)
419
+
420
+ st.write("Finished NCAAF script!")