import asyncio

import pandas as pd
import requests
from fastapi import FastAPI

# Shared DataFrame: populated by the background fetcher, read by the endpoint.
data = pd.DataFrame()

app = FastAPI()


# Declare the continuous function as an async function.
async def your_continuous_function():
    global data
    while True:
        print("data fetcher running.....")

        # Initialize an empty DataFrame to store the combined data.
        combined_df = pd.DataFrame()

        # Loop through pages 1 to 5.
        for page in range(1, 6):
            try:
                # Update the page number in the URL for each request.
                url = (
                    "https://dev3.api.curfox.parallaxtec.com/api/admin/test-dataset"
                    "?sort=id&paginate=2000&page=" + str(page)
                )
                headers = {
                    'Accept': 'application/json',
                    'X-Tenant': 'mng',
                    'Authorization': 'Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJhdWQiOiIxIiwianRpIjoiNDE3YTIyNDE4MGUzNDJmMjRiYzlkMjAyYTk3NWE0MjRiYzhlY2I0NDM3NmI0ZDdiN2M5MTQ5NjI4YTM0ZWM1Y2ZlYmZhMGMzOWFkMmNkZWIiLCJpYXQiOjE2OTA3OTk4MjQuNzgwNzY4LCJuYmYiOjE2OTA3OTk4MjQuNzgwNzcyLCJleHAiOjE3MjI0MjIyMjQuNzcwOTk0LCJzdWIiOiIxIiwic2NvcGVzIjpbXX0.OCTRI6oDChKUHsZAbxdrsarqXJEuQpnNuQzo1cp4OWj2KddKfBCMCad9cFA5-aJ2S6INN1WsGZAEa1JtOLSzxnsfLIwRr73ODHiRZM9ZYOLdJukTisbO6ONOkAFbpE48o6CHTFYen_D2QYSxqKmQ1lCO2-FQn18RPK6QOiuHuW1KYTWtxo_wXncRl5Rt2PWyrm88iMClD969-oXcjk-wU7Wr1TFTY01JDLLHYe9sqfUTkRewpzFk2YIg9pt1yTGJRboyF9mqeVcq7fADdOzP7uqUFQnro-dHrzivYYYYXUmc0UBBu7t0FY2SerwHnWObNF-tMxyPX_9xGQjzy9H1NtbvxULIrnqA9WLJBqERhpRGlKH2S_C-eacAUwI548JkwHQGelRJHpAh36xaJqNsOGh0l2HX2LIF52TqdHevj-4FE4AFLPRPqGYYv-v-tL50LRHdbTmLw_n8bpeROba8dNoz7RC-YKdyYRpBsIiaIA78_ltk_-1Qme-u9ArX_Zycf2m9BkefEVpN1DUHOuPRm_pGAyIagkCmMfIFsV9rMiEhCtqT_IV0HmMrBw1OJ2m-c-DxXLdCLpjgiKc4HPb44aCxZgm8s0vJOGGP-2zajtQ1H2IpJ7Zkjyggl4frZ2EkwX4Ay0dsiQxTG5DgKOcQZfvCMgzJCtQPL7So7c8QcUk'
                }

                # Run the blocking requests call in a worker thread so it does not
                # block the event loop while the page downloads.
                response = await asyncio.to_thread(
                    requests.get, url, headers=headers, timeout=60
                )

                # Parse the JSON response and extract the 'data' records.
                json_response = response.json()
                page_records = json_response['data']
                df = pd.json_normalize(page_records)

                # Concatenate the current page's DataFrame with the combined DataFrame.
                combined_df = pd.concat([combined_df, df], ignore_index=True)
            except Exception:
                # No more pages (or the request/parsing failed); move on.
                print("data over")

        print("data collected....")

        # Keep only finished orders with a valid delivery_possibility score,
        # then publish the result to the shared DataFrame in one assignment.
        filtered = combined_df[combined_df['status.name'].isin(['RETURN TO CLIENT', 'DELIVERED'])]
        filtered = filtered[['delivery_possibility', 'status.name']]
        data = filtered[filtered['delivery_possibility'].between(0, 100)]

        await asyncio.sleep(43200)  # Adjust the sleep interval as needed.


# Create a startup event.
@app.on_event("startup")
async def startup_event():
    # Start the continuous function as a background task.
    asyncio.create_task(your_continuous_function())


@app.get("/kpi_results")
def read_kpi_results():
    # Bucket the records by predicted delivery possibility.
    status_counts_more_than_80 = data[data['delivery_possibility'] > 80]['status.name'].value_counts()
    status_counts_50_to_80 = data[(data['delivery_possibility'] >= 50) & (data['delivery_possibility'] <= 80)]['status.name'].value_counts()
    status_counts_30_to_49 = data[(data['delivery_possibility'] >= 30) & (data['delivery_possibility'] <= 49)]['status.name'].value_counts()
    status_counts_below_30 = data[data['delivery_possibility'] < 30]['status.name'].value_counts()

    # value_counts() is sorted by frequency, so .iloc[0] is the most common
    # status in the bucket and .iloc[1] the second most common.
    kpi_result = {
        "kpi_result": {
            "status_counts_more_than_80": {
                "correct_values": str(status_counts_more_than_80.iloc[0]),
                "incorrect_values": str(status_counts_more_than_80.iloc[1])
            },
            "status_counts_50_to_80": {
                "correct_values": str(status_counts_50_to_80.iloc[0]),
                "incorrect_values": str(status_counts_50_to_80.iloc[1])
            },
            "status_counts_30_to_49": {
                "correct_values": str(status_counts_30_to_49.iloc[0]),
                "incorrect_values": str(status_counts_30_to_49.iloc[1])
            },
            "status_counts_below_30": {
                "correct_values": str(status_counts_below_30.iloc[0]),
                "incorrect_values": str(status_counts_below_30.iloc[1])
            }
        }
    }
    return kpi_result
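
# --- Usage sketch (assumption: the script above is saved as main.py and uvicorn is installed) ---
# Start the service; the startup event launches the background fetcher immediately:
#   uvicorn main:app --host 0.0.0.0 --port 8000
# Once the fetcher has finished its first pass over the pages, query the KPI endpoint:
#   curl http://localhost:8000/kpi_results
# Note that /kpi_results returns counts only after the shared DataFrame has been
# populated; before that, the per-bucket .iloc lookups will fail on empty data.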