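# Streamlit control panel for FantasyLife: one-click triggers for projection
# updates (live site, dev site, weekly, game models, trade rater, rest of
# season) plus a script that rebuilds the NCAAF game-model schedule in Sheets.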
import streamlit as st
import requests
import pandas as pd
from pandas import DataFrame
import numpy as np
import gspread
import pytz
from datetime import datetime, date, timedelta
import time

st.set_page_config(layout="wide")
scope = ['https://www.googleapis.com/auth/spreadsheets',
         'https://www.googleapis.com/auth/drive']
credentials = {
    "type": "service_account",
    "project_id": "sheets-api-connect-378620",
    "private_key_id": st.secrets['sheets_api_connect_pk'],
    # The PEM key belongs in Streamlit secrets, never hardcoded in source;
    # 'sheets_api_connect_private_key' is an assumed secret name.
    "private_key": st.secrets['sheets_api_connect_private_key'],
    "client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
    "client_id": "106625872877651920064",
    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
    "token_uri": "https://oauth2.googleapis.com/token",
    "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
    "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
}
gc = gspread.service_account_from_dict(credentials)
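
# Projection-update endpoints (live site, dev site, weekly, game models) and
# sheet/feed URLs pulled from Streamlit secrets.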
traderater = "https://www.fantasylife.com/api/projections/v1/nfl/ratemytrade/season/update"
ros_james_url = "https://www.fantasylife.com/api/projections/v1/nfl/james/ros/update"
dwain_url = "https://www.fantasylife.com/api/projections/v1/nfl/dwain/season/update"
freedman_url = "https://www.fantasylife.com/api/projections/v1/nfl/freedman/season/update"
agg_url = "https://www.fantasylife.com/api/projections/v1/nfl/aggregate/season/update"
weekly_dwain_url = "https://www.fantasylife.com/api/projections/v1/nfl/dwain/game/update"
weekly_freedman_url = "https://www.fantasylife.com/api/projections/v1/nfl/freedman/game/update"
weekly_agg_url = "https://www.fantasylife.com/api/projections/v1/nfl/aggregate/game/update"
dev_dwain_url = "https://fantasylife.dev.spotlightsportsb2b.com/api/projections/v1/nfl/dwain/season/update"
dev_freedman_url = "https://fantasylife.dev.spotlightsportsb2b.com/api/projections/v1/nfl/freedman/season/update"
dev_agg_url = "https://fantasylife.dev.spotlightsportsb2b.com/api/projections/v1/nfl/aggregate/season/update"
freedman_nfl_game_model = "https://www.fantasylife.com/api/projections/v1/nfl-odds/james/game/update"
thor_ncaaf_game_model = "https://www.fantasylife.com/api/projections/v1/ncaafb-odds/james/game/update"
NCAAF_model_url = st.secrets['NCAAF_model_url']
pff_url = st.secrets['pff_url']
headers = {
    'Authorization': st.secrets['FL_Authorization'],
}
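
# Every button handler below repeats the same POST-and-confirm pattern. A
# minimal shared helper they could call instead (a sketch, not wired in; the
# failure branch and message strings are additions — the original handlers
# report success only):
def trigger_update(url, label):
    # POST to a projection-update endpoint and surface the result in the UI
    response = requests.post(url, headers=headers)
    if response.status_code == 200:
        st.write(f"{label}: Uploading!")
    else:
        st.error(f"{label} update failed (HTTP {response.status_code})")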
tab1, tab2, tab3, tab4, tab5, tab6, tab7 = st.tabs(['Season Long (Live Site)', 'Season Long (Dev Site)', 'Weekly', 'Game Model', 'Trade Rater', 'Rest of Season', 'NCAAF Script'])
with tab1:
    with st.container():
        col1, col2, col3 = st.columns([3, 3, 3])
        with col1:
            st.info("Update Dwain's LIVE SITE FantasyLife Season Long Projections")
            if st.button("Dwain Projection Update (Live Seasonal)", key='reset1'):
                response = requests.post(dwain_url, headers=headers)
                if response.status_code == 200:
                    st.write("Uploading!")
        with col2:
            st.info("Update Freedman's LIVE SITE FantasyLife Season Long Projections")
            if st.button("Freedman Projection Update (Live Seasonal)", key='reset2'):
                response = requests.post(freedman_url, headers=headers)
                if response.status_code == 200:
                    st.write("Uploading!")
        with col3:
            st.info("Update the Aggregate LIVE SITE FantasyLife Season Long Projections")
            if st.button("Aggregate Projection Update (Live Seasonal)", key='reset3'):
                response = requests.post(agg_url, headers=headers)
                if response.status_code == 200:
                    st.write("Uploading!")
with tab2:
    with st.container():
        col1, col2, col3 = st.columns([3, 3, 3])
        with col1:
            st.info("Update Dwain's DEV SITE FantasyLife Season Long Projections")
            if st.button("Dwain Projection Update (Dev Seasonal)", key='reset4'):
                response = requests.post(dev_dwain_url, headers=headers)
                if response.status_code == 200:
                    st.write("Uploading!")
        with col2:
            st.info("Update Freedman's DEV SITE FantasyLife Season Long Projections")
            if st.button("Freedman Projection Update (Dev Seasonal)", key='reset5'):
                response = requests.post(dev_freedman_url, headers=headers)
                if response.status_code == 200:
                    st.write("Uploading!")
        with col3:
            st.info("Update the Aggregate DEV SITE FantasyLife Season Long Projections")
            if st.button("Aggregate Projection Update (Dev Seasonal)", key='reset6'):
                response = requests.post(dev_agg_url, headers=headers)
                if response.status_code == 200:
                    st.write("Uploading!")
with tab3:
    with st.container():
        col1, col2, col3 = st.columns([3, 3, 3])
        with col1:
            st.info("Update Dwain's FantasyLife Weekly Projections")
            if st.button("Dwain Projection Update (Weekly)", key='reset7'):
                response = requests.post(weekly_dwain_url, headers=headers)
                if response.status_code == 200:
                    st.write("Uploading!")
        with col2:
            st.info("Update Freedman's FantasyLife Weekly Projections")
            if st.button("Freedman Projection Update (Weekly)", key='reset8'):
                response = requests.post(weekly_freedman_url, headers=headers)
                if response.status_code == 200:
                    st.write("Uploading!")
        with col3:
            st.info("Update the Aggregate FantasyLife Weekly Projections")
            if st.button("Aggregate Projection Update (Weekly)", key='reset9'):
                response = requests.post(weekly_agg_url, headers=headers)
                if response.status_code == 200:
                    st.write("Uploading!")
with tab4:
    with st.container():
        col1, col2, col3 = st.columns([3, 3, 3])
        with col1:
            st.info("Update Freedman NFL Game Model")
            if st.button("Update Freedman NFL Game Model (Weekly)", key='reset10'):
                response = requests.post(freedman_nfl_game_model, headers=headers)
                if response.status_code == 200:
                    st.write("Uploading!")
        with col2:
            st.info("Update Thor NCAAF Game Model")
            if st.button("Update Thor NCAAF Game Model (Weekly)", key='reset11'):
                response = requests.post(thor_ncaaf_game_model, headers=headers)
                if response.status_code == 200:
                    st.write("Uploading!")
with tab5:
    with st.container():
        col1, col2, col3 = st.columns([3, 3, 3])
        with col1:
            st.info("Update FantasyLife Trade Rater")
            if st.button("Projection Update (Trade Rater)", key='reset12'):
                response = requests.post(traderater, headers=headers)
                if response.status_code == 200:
                    st.write("Uploading!")
with tab6:
    with st.container():
        col1, col2, col3 = st.columns([3, 3, 3])
        with col1:
            st.info("Update Rest of Season Projections")
            if st.button("Rest of Season Update", key='reset13'):
                response = requests.post(ros_james_url, headers=headers)
                if response.status_code == 200:
                    st.write("Uploading!")
with tab7:
    with st.container():
        col1, col2, col3 = st.columns([3, 3, 3])
        with col1:
            st.info("Update NCAAF schedule and ranks")
            if st.button("Update NCAAF", key='reset14'):
                st.write("Initiated")
                sh = gc.open_by_url(NCAAF_model_url)
                worksheet = sh.worksheet('ATLranks')
                ranks_df = DataFrame(worksheet.get_all_records())
                ranks_dict = dict(zip(ranks_df.Team, ranks_df.ATL))
                conf_dict = dict(zip(ranks_df.Team, ranks_df.Conference))
                time.sleep(.5)
                worksheet = sh.worksheet('HFA')
                hfa_df = DataFrame(worksheet.get_all_records())
                hfa_dict = dict(zip(hfa_df.Team, hfa_df.HFA))
                time.sleep(.5)
                worksheet = sh.worksheet('Odds')
                odds_df = DataFrame(worksheet.get_all_records())
                odds_dict = dict(zip(odds_df.Point_Spread, odds_df.Favorite_Win_Chance))
                time.sleep(.5)
                worksheet = sh.worksheet('Acronyms')
                acros_df = DataFrame(worksheet.get_all_records())
                right_acro = acros_df['Team'].tolist()
                wrong_acro = acros_df['Acro'].tolist()
                time.sleep(.5)
                worksheet = sh.worksheet('Add games')
                add_games_df = DataFrame(worksheet.get_all_records())
                add_games_df.replace('', np.nan, inplace=True)
                neutral_dict = dict(zip(add_games_df.game_id, add_games_df.Neutral))
                time.sleep(.5)
                worksheet = sh.worksheet('Completed games')
                comp_games_df = DataFrame(worksheet.get_all_records())
                comp_games_df.replace('', np.nan, inplace=True)
                time.sleep(.5)
                worksheet = sh.worksheet('LY_scoring')
                lyscore_df = DataFrame(worksheet.get_all_records())
                # Normalize team names via the acronym map (a single list-based
                # replace handles every pair), then build last-year scoring lookups
                lyscore_df['Team'] = lyscore_df['Team'].replace(wrong_acro, right_acro)
                PFA_dict = dict(zip(lyscore_df.Team, lyscore_df.PF_G_adj))
                PAA_dict = dict(zip(lyscore_df.Team, lyscore_df.PA_G_adj))
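                # Step 2: fetch the PFF schedule feed and flatten each game into
                # a row, tagging the week and both franchises' conferences.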
                # Send a GET request to the PFF API
                st.write("Retrieving PFF data")
                response = requests.get(pff_url)
                # Check if the request was successful
                if response.status_code == 200:
                    # Parse the JSON content
                    data = response.json()
                    # Extract the "weeks" object
                    weeks = data.get('weeks', [])
                    # Initialize empty lists to collect game and team rows
                    games_list = []
                    team_list = []
                    # Iterate over each week and its games
                    for week in weeks:
                        week_number = week.get('week')
                        for game in week.get('games', []):
                            # Add the week number to the game dictionary
                            game['week'] = week_number
                            away_franchise = game.get('away_franchise', {})
                            away_conf = away_franchise.get('groups', [{}])[0].get('name')
                            home_franchise = game.get('home_franchise', {})
                            home_conf = home_franchise.get('groups', [{}])[0].get('name')
                            # Flatten the away and home franchise data
                            game_data = {
                                'game_id': game.get('external_game_id'),
                                'Day': game.get('kickoff_date'),
                                'CST': game.get('kickoff_raw'),
                                'away_id': away_franchise.get('abbreviation'),
                                'Away': away_franchise.get('city'),
                                'home_id': home_franchise.get('abbreviation'),
                                'Home': home_franchise.get('city')
                            }
                            home_data = {
                                'team': home_franchise.get('city'),
                                'conf': home_conf
                            }
                            away_data = {
                                'team': away_franchise.get('city'),
                                'conf': away_conf
                            }
                            games_list.append(game_data | game)
                            team_list.append(home_data)
                            team_list.append(away_data)
                    # Create DataFrames from the games and teams lists
                    df = pd.DataFrame(games_list)
                    team_df = pd.DataFrame(team_list)
                    team_df = team_df.drop_duplicates(subset=['team', 'conf'])
                else:
                    # Bail out here; everything below depends on df existing
                    st.error(f"Failed to retrieve data. HTTP status code: {response.status_code}")
                    st.stop()
st.write("Cleaning data") | |
df_raw = df[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'point_spread', 'over_under', 'Day', 'CST']] | |
df_raw['conf_game'] = np.nan | |
df_raw['Away_ATL'] = np.nan | |
df_raw['Home_ATL'] = np.nan | |
df_raw['Home Spread'] = np.nan | |
df_raw['Proj Total'] = np.nan | |
df_raw['Neutral'] = np.nan | |
df_raw['Notes'] = np.nan | |
df_raw['over_under'].fillna("", inplace=True) | |
df_raw['over_under'] = pd.to_numeric(df_raw['over_under'], errors='coerce') | |
df_raw = df_raw[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'conf_game', 'Away_ATL', 'Home_ATL', 'point_spread', 'Home Spread', 'over_under', 'Proj Total', 'Day', 'CST', 'Neutral', 'Notes']] | |
add_games_merge = add_games_df | |
comp_games_merge = comp_games_df | |
conf_adj = dict(zip(add_games_merge['game_id'], add_games_merge['conf_game'])) | |
df_merge_1 = pd.concat([add_games_merge, df_raw]) | |
df_cleaned = pd.concat([comp_games_merge, df_merge_1]) | |
df_cleaned = df_cleaned[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'point_spread', 'over_under', 'Day', 'CST']] | |
df_cleaned = df_cleaned.drop_duplicates(subset=['game_id']) | |
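                # Fallback projections for teams missing last-year scoring data:
                # average the points for/against of teams rated within +/-5 ATL
                # points of the team in question.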
                def cond_away_PFA(row, df):
                    mask = (df['Away_ATL'] >= row['Away_ATL'] - 5) & (df['Away_ATL'] <= row['Away_ATL'] + 5)
                    return df.loc[mask, 'Away_PFA'].mean()

                def cond_home_PFA(row, df):
                    mask = (df['Home_ATL'] >= row['Home_ATL'] - 5) & (df['Home_ATL'] <= row['Home_ATL'] + 5)
                    return df.loc[mask, 'Home_PFA'].mean()

                def cond_away_PAA(row, df):
                    mask = (df['Away_ATL'] >= row['Away_ATL'] - 5) & (df['Away_ATL'] <= row['Away_ATL'] + 5)
                    return df.loc[mask, 'Away_PAA'].mean()

                def cond_home_PAA(row, df):
                    mask = (df['Home_ATL'] >= row['Home_ATL'] - 5) & (df['Home_ATL'] <= row['Home_ATL'] + 5)
                    return df.loc[mask, 'Home_PAA'].mean()
                df_cleaned['Away'] = df_cleaned['Away'].replace(wrong_acro, right_acro)
                df_cleaned['Home'] = df_cleaned['Home'].replace(wrong_acro, right_acro)
                df_cleaned['Away_conf'] = df_cleaned['Away'].map(conf_dict)
                df_cleaned['Home_conf'] = df_cleaned['Home'].map(conf_dict)
                df_cleaned['conf_game_var'] = np.where((df_cleaned['Away_conf'] == df_cleaned['Home_conf']), 1, 0)
                df_cleaned['conf_game'] = df_cleaned.apply(lambda row: conf_adj.get(row['game_id'], row['conf_game_var']), axis=1)
                df_cleaned['Away_ATL'] = df_cleaned['Away'].map(ranks_dict)
                df_cleaned['Home_ATL'] = df_cleaned['Home'].map(ranks_dict)
                df_cleaned['Away_PFA'] = df_cleaned['Away'].map(PFA_dict)
                df_cleaned['Home_PFA'] = df_cleaned['Home'].map(PFA_dict)
                df_cleaned['Away_PAA'] = df_cleaned['Away'].map(PAA_dict)
                df_cleaned['Home_PAA'] = df_cleaned['Home'].map(PAA_dict)
                # Apply the conditional fallbacks to each row in the DataFrame
                df_cleaned['cond_away_PFA'] = df_cleaned.apply(lambda row: cond_away_PFA(row, df_cleaned), axis=1)
                df_cleaned['cond_home_PFA'] = df_cleaned.apply(lambda row: cond_home_PFA(row, df_cleaned), axis=1)
                df_cleaned['cond_away_PAA'] = df_cleaned.apply(lambda row: cond_away_PAA(row, df_cleaned), axis=1)
                df_cleaned['cond_home_PAA'] = df_cleaned.apply(lambda row: cond_home_PAA(row, df_cleaned), axis=1)
                # Teams rated at or below 0 get fixed baselines: 18 points for, 36 against
                df_cleaned['cond_away_PFA'] = np.where((df_cleaned['Away_ATL'] <= 0), 18, df_cleaned['cond_away_PFA'])
                df_cleaned['cond_away_PAA'] = np.where((df_cleaned['Away_ATL'] <= 0), 36, df_cleaned['cond_away_PAA'])
                df_cleaned['cond_home_PFA'] = np.where((df_cleaned['Home_ATL'] <= 0), 18, df_cleaned['cond_home_PFA'])
                df_cleaned['cond_home_PAA'] = np.where((df_cleaned['Home_ATL'] <= 0), 36, df_cleaned['cond_home_PAA'])
                df_cleaned['Away_PFA'] = df_cleaned['Away_PFA'].fillna(df_cleaned['cond_away_PFA'])
                df_cleaned['Away_PAA'] = df_cleaned['Away_PAA'].fillna(df_cleaned['cond_away_PAA'])
                df_cleaned['Home_PFA'] = df_cleaned['Home_PFA'].fillna(df_cleaned['cond_home_PFA'])
                df_cleaned['Home_PAA'] = df_cleaned['Home_PAA'].fillna(df_cleaned['cond_home_PAA'])
                # Blend each side's scoring with the opponent's points allowed (75/25)
                df_cleaned['Away_PFA_adj'] = (df_cleaned['Away_PFA'] * .75 + df_cleaned['Home_PAA'] * .25)
                df_cleaned['Home_PFA_adj'] = (df_cleaned['Home_PFA'] * .75 + df_cleaned['Away_PAA'] * .25)
                df_cleaned['Away_PFA_cond'] = (df_cleaned['cond_away_PFA'] * .75 + df_cleaned['cond_home_PAA'] * .25)
                df_cleaned['Home_PFA_cond'] = (df_cleaned['cond_home_PFA'] * .75 + df_cleaned['cond_away_PAA'] * .25)
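                # Step 4: spreads and totals. Home spread = -(Home_ATL - Away_ATL
                # + HFA), with HFA zeroed for neutral-site games; win probability
                # comes from the Odds tab lookup.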
                df_cleaned['Neutral'] = df_cleaned['game_id'].map(neutral_dict)
                df_cleaned['HFA'] = np.where(df_cleaned['Neutral'] == 1, 0, df_cleaned['Home'].map(hfa_dict))
                # Blank Neutral again so the export column starts empty
                df_cleaned['Neutral'] = np.nan
                df_cleaned['Home Spread'] = ((df_cleaned['Home_ATL'] - df_cleaned['Away_ATL']) + df_cleaned['HFA']) * -1
                # Assumes computed spreads line up with the Point_Spread keys in the Odds tab
                df_cleaned['Win Prob'] = df_cleaned['Home Spread'].map(odds_dict)
                df_cleaned['Spread Adj'] = np.nan
                df_cleaned['Final Spread'] = np.nan
                df_cleaned['Proj Total'] = df_cleaned['Away_PFA_adj'] + df_cleaned['Home_PFA_adj']
                # Blend with the Vegas total where one exists (66/34)
                df_cleaned['Proj Total (adj)'] = np.where(df_cleaned['over_under'].notna(), (df_cleaned['over_under'] * .66 + df_cleaned['Proj Total'] * .34), df_cleaned['Proj Total'])
                df_cleaned['Proj Total (adj)'] = df_cleaned['Proj Total (adj)'].fillna(df_cleaned['Proj Total'])
                df_cleaned['Total Adj'] = np.nan
                df_cleaned['Final Total'] = np.nan
                df_cleaned['Notes'] = np.nan
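                # Step 5: assemble the export frame in the Master_sched column
                # order, coerce numerics, and dedupe by week and matchup.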
                export_df = df_cleaned[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'conf_game', 'Away_ATL', 'Home_ATL', 'point_spread', 'Home Spread',
                                        'over_under', 'Proj Total (adj)', 'Day', 'CST', 'Neutral', 'Notes']].copy()
                export_df.rename(columns={"pff_week": "week", "point_spread": "Vegas Spread", "over_under": "Vegas Total", "Proj Total (adj)": "Proj Total"}, inplace=True)
                export_df['week'] = pd.to_numeric(export_df['week'], errors='coerce')
                export_df = export_df.drop_duplicates(subset=['week', 'Away', 'Home'])
                export_df = export_df.sort_values(by='week', ascending=True)
                export_df['Vegas Spread'] = pd.to_numeric(export_df['Vegas Spread'], errors='coerce')
                export_df['Vegas Total'] = pd.to_numeric(export_df['Vegas Total'], errors='coerce')
                export_df['Proj Total'] = pd.to_numeric(export_df['Proj Total'], errors='coerce')
                export_df['Home Spread'] = pd.to_numeric(export_df['Home Spread'], errors='coerce')
                # gspread can't serialize NaN/inf, so blank them out before upload
                export_df.replace([np.nan, np.inf, -np.inf], '', inplace=True)
                export_df = export_df.drop_duplicates(subset=['week', 'away_id', 'home_id'])
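                # Step 6: overwrite the Master_sched tab with the refreshed schedule.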
                sh = gc.open_by_url(NCAAF_model_url)
                worksheet = sh.worksheet('Master_sched')
                # The export has 17 columns, so clear A through Q
                worksheet.batch_clear(['A:Q'])
                worksheet.update([export_df.columns.values.tolist()] + export_df.values.tolist())
                st.write("Uploaded Master Schedule")
                st.write("Finished NCAAF Script!")