Spaces:
Sleeping
Sleeping
File size: 6,056 Bytes
e37910a 9279b49 e37910a 21011da e37910a feca41c e37910a feca41c 21011da 9279b49 e37910a 76bba56 e37910a 1f1cdf5 e37910a a1c52fc ccda928 a1c52fc e37910a a1c52fc e37910a 819b794 e37910a 8233aaf 21011da e37910a 21011da e37910a 459dd2b e37910a 459dd2b 6fd8a4a 459dd2b 6fd8a4a 459dd2b 6fd8a4a 459dd2b 6fd8a4a 459dd2b e37910a 3b7ebee |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 |
import asyncio
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import requests
import pandas as pd
import json
# Shared in-memory cache of the latest filtered order DataFrame.
# Written by the background fetcher task; None until the first
# successful fetch completes. (The original `global data` statement
# at module scope was a no-op and left the name undefined.)
data = None

app = FastAPI()

# Open CORS for the dashboard front-end.
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True
# is rejected by the CORS spec (browsers ignore credentials with a
# wildcard origin) — pin the real origin(s) if credentials are needed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Background fetcher: declared async so it can be scheduled on the
# FastAPI event loop and sleep without blocking it.
async def your_continuous_function():
    """Periodically pull the full order dataset from the Curfox dev API.

    Runs forever as a background task. Every 12 hours it:
      1. asks the metadata endpoint for the total order count to size
         the pagination,
      2. pages through the test-dataset endpoint (2000 rows/page) and
         concatenates the pages,
      3. filters to terminal statuses ('RETURN TO CLIENT' / 'DELIVERED')
         with delivery_possibility in [0, 100], and
      4. publishes the result via the module-level ``data`` global,
         which the /kpi_results endpoint reads.

    Bug fixed: the metadata URL previously began with a stray quote
    ("'https://...), so requests raised InvalidURL and the task died on
    every cycle — ``data`` was never populated.
    """
    global data

    metadata_url = "https://dev3.api.curfox.parallaxtec.com/api/ml/order-metadata"
    page_url = "https://dev3.api.curfox.parallaxtec.com/api/admin/test-dataset?sort=id&paginate=2000&page="

    # NOTE(review): hardcoded bearer tokens — these belong in environment
    # variables or a secrets store, not in source control.
    metadata_headers = {
        'Accept': 'application/json',
        'X-Tenant': 'mng',
        'Authorization': 'Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJhdWQiOiIxIiwianRpIjoiZWQzYjVkN2JkNTU5YmQxNWNmYzdiNThhM2UyZDlmNGEyMGQzMDFjMWY4ZWVlNDY2ZDBlZTAxYmMzZmVjMTU1ZWNjNzMxOWUxMGUxZGY3NDMiLCJpYXQiOjE3MDIyNzIyMDcuNjg0OTE2LCJuYmYiOjE3MDIyNzIyMDcuNjg0OTIzLCJleHAiOjE3MzM4OTQ2MDcuNjczNDYyLCJzdWIiOiIxIiwic2NvcGVzIjpbXX0.NFZvGO0GjoD7u3FRiIewRRoWu7ouUmKTKnCei8LMwQWzLntBLYcj_Bs21amjcHtzdbQNyCovHSDHJQaLJnD04kY1JRAdDC_OLi2YiZoSvnSJxNjWiuC4kwNE59Ndwu3o2iAzB-nd1EvyMnU_na7WxICRP8OegrpM-_q6M-wgnv7igaNeWjdxnXdtxbr-Zz7N2Xv2skWZwoDce37kWvH1tK7eqMK0uWqqyhBpli22CmkKPduHUNKMNOEnGTskeDaTuX5za2Lr8CNa34_FdKu3Y5CrFMGDBHT_UGALocpr80_38iifXm7WDl6ZIA1iYy6dBvCTeoC_aFo1X5FIrFbJgMCokW4VH0Q2ljm9ty0W7ATAiKrM1GIVFS5Dir4A1KI3LSeE459SqZpqsoJmaU95zSYbfnU_oZ9UpvW59nFgD6yJ8hGHyYnjhCS0jmxk3cq93T9X1rNWo2t0A3XYXgqZYnZrZpdrSbn-JVoX_NW1QC6RtmAGm7AtZ3GBrzxwu3m_7MicMI7Tu4W6d2WD9kZjq0khBUrm2DVZJzN2BRmH-a7JkAqJ0icpHQ_2Tc6T-95axebp6QEmHHXBKILNNwWxucZ0l-Ny0TuUivqn0m9gSJJDkA8ssWyBkzzJ9fUeRmJGbUFTeemPhMrF3_cvTUZ0J7IC2CK7qWePcHPQ-sy0is4'
    }
    page_headers = {
        'Accept': 'application/json',
        'X-Tenant': 'mng',
        'Authorization': 'Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJhdWQiOiIxIiwianRpIjoiNDE3YTIyNDE4MGUzNDJmMjRiYzlkMjAyYTk3NWE0MjRiYzhlY2I0NDM3NmI0ZDdiN2M5MTQ5NjI4YTM0ZWM1Y2ZlYmZhMGMzOWFkMmNkZWIiLCJpYXQiOjE2OTA3OTk4MjQuNzgwNzY4LCJuYmYiOjE2OTA3OTk4MjQuNzgwNzcyLCJleHAiOjE3MjI0MjIyMjQuNzcwOTk0LCJzdWIiOiIxIiwic2NvcGVzIjpbXX0.OCTRI6oDChKUHsZAbxdrsarqXJEuQpnNuQzo1cp4OWj2KddKfBCMCad9cFA5-aJ2S6INN1WsGZAEa1JtOLSzxnsfLIwRr73ODHiRZM9ZYOLdJukTisbO6ONOkAFbpE48o6CHTFYen_D2QYSxqKmQ1lCO2-FQn18RPK6QOiuHuW1KYTWtxo_wXncRl5Rt2PWyrm88iMClD969-oXcjk-wU7Wr1TFTY01JDLLHYe9sqfUTkRewpzFk2YIg9pt1yTGJRboyF9mqeVcq7fADdOzP7uqUFQnro-dHrzivYYYYXUmc0UBBu7t0FY2SerwHnWObNF-tMxyPX_9xGQjzy9H1NtbvxULIrnqA9WLJBqERhpRGlKH2S_C-eacAUwI548JkwHQGelRJHpAh36xaJqNsOGh0l2HX2LIF52TqdHevj-4FE4AFLPRPqGYYv-v-tL50LRHdbTmLw_n8bpeROba8dNoz7RC-YKdyYRpBsIiaIA78_ltk_-1Qme-u9ArX_Zycf2m9BkefEVpN1DUHOuPRm_pGAyIagkCmMfIFsV9rMiEhCtqT_IV0HmMrBw1OJ2m-c-DxXLdCLpjgiKc4HPb44aCxZgm8s0vJOGGP-2zajtQ1H2IpJ7Zkjyggl4frZ2EkwX4Ay0dsiQxTG5DgKOcQZfvCMgzJCtQPL7So7c8QcUk'
    }

    while True:
        print("data fetcher running.....")
        combined_df = pd.DataFrame()

        # Size the pagination from the total order count. Fail soft: a
        # transient API error should not kill the task permanently.
        try:
            order_count = requests.get(metadata_url, headers=metadata_headers).json()["data"]["order_count"]
        except Exception as exc:
            print(f"failed to fetch order metadata: {exc}")
            await asyncio.sleep(43200)
            continue

        # +2 so floor division never undershoots the final partial page
        # (range() below excludes its upper bound).
        count = order_count // 2000 + 2
        print(count)

        for page in range(1, count):
            try:
                response = requests.get(page_url + str(page), headers=page_headers)
                # Use a local name here — the original wrote the raw page
                # JSON into the *global* `data`, transiently clobbering
                # the published dataset.
                page_rows = response.json()['data']
                combined_df = pd.concat(
                    [combined_df, pd.json_normalize(page_rows)],
                    ignore_index=True,
                )
            except Exception:
                # Past the last page (or a transient failure): stop
                # paging instead of looping over the remaining pages.
                print("data over")
                break

        print("data collected....")

        # Publish only rows with a terminal status and a plausible score.
        # Guard against an empty frame (no 'status.name' column), which
        # previously raised KeyError and killed the task.
        if not combined_df.empty and 'status.name' in combined_df.columns:
            filtered = combined_df[combined_df['status.name'].isin(['RETURN TO CLIENT', 'DELIVERED'])]
            filtered = filtered[['delivery_possibility', 'status.name']]
            data = filtered[filtered['delivery_possibility'].between(0, 100)]

        await asyncio.sleep(43200)  # refresh twice a day
# Create a startup event.
# NOTE(review): @app.on_event is deprecated in recent FastAPI versions;
# consider migrating to a lifespan context manager.
@app.on_event("startup")
async def startup_event():
    """Launch the continuous data fetcher as a background task."""
    # Keep a reference on app.state: asyncio only holds a weak reference
    # to tasks, so an unreferenced task can be garbage-collected
    # mid-execution (per the asyncio.create_task documentation).
    app.state.fetcher_task = asyncio.create_task(your_continuous_function())
@app.get("/kpi_results")
def read_root():
    """Return delivery-prediction KPI buckets from the cached dataset.

    Buckets the cached orders by ``delivery_possibility`` and reports,
    per bucket, the counts of the two most frequent status values.
    Responds with an explanatory payload (instead of a 500) until the
    background fetcher has populated the cache.
    """
    # The original module never initialized `data`, so reading it before
    # the first fetch raised NameError; look it up defensively.
    cached = globals().get("data")
    if cached is None or getattr(cached, "empty", True):
        return {"detail": "data not available yet; try again later"}

    def _bucket(mask):
        """Top-two status counts for the rows selected by `mask`.

        NOTE(review): "correct"/"incorrect" are positional — most
        frequent status first — not keyed by status name ('DELIVERED'
        vs 'RETURN TO CLIENT'); confirm this matches the KPI's intent.
        Uses .iloc because positional `series[0]` on a string-indexed
        Series is deprecated in pandas 2.x. Defaults to 0 when a bucket
        has fewer than two statuses (previously an IndexError).
        """
        counts = cached[mask]['status.name'].value_counts()
        return {
            "correct_values": int(counts.iloc[0]) if len(counts) > 0 else 0,
            "incorrect_values": int(counts.iloc[1]) if len(counts) > 1 else 0,
        }

    dp = cached['delivery_possibility']
    return {
        "kpi_result": {
            "status_counts_more_than_80": _bucket(dp > 80),
            "status_counts_50_to_80": _bucket((dp >= 50) & (dp <= 80)),
            "status_counts_30_to_49": _bucket((dp >= 30) & (dp <= 49)),
            "status_counts_below_30": _bucket(dp < 30),
        }
    }