Update app.py
app.py CHANGED
@@ -1,5 +1,6 @@
 import streamlit as st
 import requests
+import requests
 import pandas as pd
 from pandas import DataFrame
 import numpy as np
@@ -8,6 +9,7 @@ import pytz
 from datetime import datetime
 from datetime import date, timedelta
 import time
+from discordwebhook import Discord

 st.set_page_config(layout="wide")

@@ -182,6 +184,9 @@ with tab7:
     with col1:
         st.info("Update NCAAF schedule and ranks")
         if st.button("Update NCAAF", key='reset14'):
+
+            st.write("Initiated")
+
             sh = gc.open_by_url(NCAAF_model_url)
             worksheet = sh.worksheet('ATLranks')
             ranks_df = DataFrame(worksheet.get_all_records())
@@ -232,6 +237,8 @@ with tab7:
             # Send a GET request to the API
             response = requests.get(pff_url)

+            st.write("retreiving PFF data")
+
             # Check if the request was successful
             if response.status_code == 200:
                 # Parse the JSON content
@@ -293,7 +300,9 @@ with tab7:
                 print(df)
             else:
                 print(f"Failed to retrieve data. HTTP Status code: {response.status_code}")
-
+
+
+            st.write("Cleaning data")
             df_raw = df[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'point_spread', 'over_under', 'Day', 'CST']]
             df_raw['conf_game'] = np.nan
             df_raw['Away_ATL'] = np.nan
@@ -400,6 +409,8 @@ with tab7:
             worksheet.batch_clear(['A:P'])
             worksheet.update([export_df.columns.values.tolist()] + export_df.values.tolist())

+
+            st.write("Uploaded Master Schedule, now uploading weeks")
             sheet_list = ['W0', 'W1', 'W2', 'W3', 'W4', 'W5', 'W6', 'W7', 'W8', 'W9', 'W10', 'W11', 'W12', 'W13', 'W14']
             # sheet_list = ['W0']
             counter = 0
@@ -415,4 +426,4 @@ with tab7:

                 time.sleep(3.76)

-            st.write("Finished NCAAF
+            st.write("Finished NCAAF Script!")
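
Note: the only new dependency the commit pulls in is "from discordwebhook import Discord"; the import is not exercised in the hunks shown above, so any notification call presumably lives elsewhere in the file or is planned. As a minimal sketch, the discordwebhook package is normally used by wrapping a webhook URL in a Discord object and calling post(); the st.secrets key below is a hypothetical name, not something taken from this app:

    import streamlit as st
    from discordwebhook import Discord

    # Hypothetical secret name -- the real app may store the webhook URL differently.
    notifier = Discord(url=st.secrets["ncaaf_webhook_url"])

    # post() sends a plain text message to the channel behind the webhook.
    notifier.post(content="NCAAF schedule update finished")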
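The button handler fetches the PFF feed with requests.get(pff_url) and only parses the JSON when the status code is 200, otherwise it prints the failure. A self-contained sketch of that fetch-and-check step, assuming the feed is JSON that pandas can flatten (the 'games' key is an assumption about the payload shape):

    import requests
    import pandas as pd

    def fetch_pff_schedule(pff_url):
        # Send a GET request to the API, mirroring the status check in app.py.
        response = requests.get(pff_url, timeout=30)
        if response.status_code != 200:
            raise RuntimeError(
                f"Failed to retrieve data. HTTP Status code: {response.status_code}"
            )
        payload = response.json()
        # The 'games' key is assumed; fall back to the payload itself if absent.
        records = payload.get("games", payload) if isinstance(payload, dict) else payload
        # json_normalize flattens the game records into DataFrame columns.
        return pd.json_normalize(records)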
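The "Cleaning data" step keeps ten columns from the parsed feed and adds empty placeholder columns (conf_game, Away_ATL, and so on) that later steps fill in. Assigning new columns to a slice taken straight from df is what triggers pandas' SettingWithCopyWarning; taking a .copy() of the slice first avoids it. A sketch, with the placeholder list trimmed to what the hunks show:

    import numpy as np
    import pandas as pd

    def clean_schedule(df: pd.DataFrame) -> pd.DataFrame:
        keep = ['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home',
                'point_spread', 'over_under', 'Day', 'CST']
        out = df[keep].copy()        # .copy() avoids pandas' SettingWithCopyWarning
        out['conf_game'] = np.nan    # placeholder columns filled by later steps
        out['Away_ATL'] = np.nan
        return out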
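The export clears columns A:P on the master worksheet, rewrites it from the DataFrame (header row first, then values), then repeats the same upload for each weekly tab W0 through W14 with a time.sleep(3.76) pause between writes, which keeps the loop under the Google Sheets API write quota (roughly 60 write requests per minute per user). A sketch of that pattern with gspread; the weekly dict of per-week DataFrames is a hypothetical structure, not the app's actual variable:

    import time

    def push_df(worksheet, df):
        # Same clear-then-rewrite pattern used for the master schedule.
        worksheet.batch_clear(['A:P'])
        worksheet.update([df.columns.values.tolist()] + df.values.tolist())

    def push_weeks(sh, weekly):
        # sh: a gspread Spreadsheet; weekly: dict mapping tab names
        # ('W0'..'W14') to DataFrames (assumed shape).
        for name, df in weekly.items():
            push_df(sh.worksheet(name), df)
            time.sleep(3.76)  # pause between writes to stay under the API quota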