import atexit
import datetime

from apscheduler.schedulers.background import BackgroundScheduler
from fastapi import FastAPI
import utils
import pandas as pd
import uvicorn

from classes import Productivity, Throughput



# Global variables (saves time on loading data)
# NOTE(review): state_vars appears unused in this file — confirm before removing.
state_vars = None
# Human-readable timestamp of the last reload ('%D %T' -> MM/DD/YY HH:MM:SS).
reload_timestamp = datetime.datetime.now().strftime('%D %T')

# Full dataset cache; None until load_data() runs for the first time.
data_all = None
# 24-hour view of the data, recomputed by load_data().
data_24h = None

# FastAPI application exposing the leaderboard endpoints below.
app = FastAPI()

def load_data():
    """
    Reload the global data caches.

    Fetches new runs, refreshes the full dataset (``data_all``), recomputes
    the rows updated within the last 24 hours (``data_24h``), and stamps
    ``reload_timestamp``.
    """
    global data_all, data_24h, reload_timestamp

    utils.fetch_new_runs()

    data_all = utils.preload_data()

    # BUG FIX: previously data_24h was assigned the boolean mask itself,
    # not the filtered rows; downstream consumers (productivity/throughput
    # endpoints) treat it exactly like data_all, so it must be a slice.
    recent_mask = (
        pd.Timestamp.now() - data_all['updated_at'].apply(pd.Timestamp)
    ) < pd.Timedelta('1 days')
    data_24h = data_all[recent_mask]

    reload_timestamp = datetime.datetime.now().strftime('%D %T')

    print(f'Reloaded data at {reload_timestamp}')


def start_scheduler():
    """
    Launch a background job that reloads the cached data every 30 minutes,
    and make sure the scheduler is stopped cleanly on interpreter exit.
    """
    scheduler = BackgroundScheduler()
    half_hour = 60 * 30
    scheduler.add_job(func=load_data, trigger="interval", seconds=half_hour)
    scheduler.start()

    # Stop the background thread when the process exits.
    atexit.register(scheduler.shutdown)


@app.get("/")
def home():
    """Root endpoint: return a plain greeting string."""
    greeting = "Welcome to the Bittensor Protein Folding Leaderboard API!"
    return greeting


@app.get("/updated")
def updated():
    """Report when the cached data was last reloaded."""
    last_reload = reload_timestamp
    return last_reload


@app.get("/productivity", response_model=Productivity)
def productivity_metrics():
    """
    Get the productivity metrics

    Computed over the full dataset and over the last-24-hour view.
    """
    all_time = utils.get_productivity(data_all)
    recent = utils.get_productivity(data_24h)
    return Productivity(all_time=all_time, last_24h=recent)


@app.get("/throughput", response_model=Throughput)
def throughput_metrics():
    """
    Get the throughput metrics

    Computed over the full dataset and over the last-24-hour view.
    """
    all_time = utils.get_data_transferred(data_all)
    recent = utils.get_data_transferred(data_24h)
    return Throughput(all_time=all_time, last_24h=recent)



if __name__ == '__main__':

    # Populate the global caches once before serving any requests.
    load_data()

    # Refresh the caches every 30 minutes in the background.
    start_scheduler()

    # Blocks until the server is shut down.
    uvicorn.run(app, host='0.0.0.0', port=5001)
    

    # to test locally (note: there is no /data route in this file)
    # curl -X GET http://0.0.0.0:5001/productivity