# curfox_kpi_api / main.py
import asyncio
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import requests
import pandas as pd
import aiohttp

app = FastAPI()
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Legacy synchronous fetcher, kept for reference; superseded by the async
# your_continuous_function defined below.
def your_continuous_function_old(X_Tenant):
while True:
print("data fetcher running.....")
# Initialize an empty DataFrame to store the combined data
combined_df = pd.DataFrame()
url = "https://dev3.api.curfox.parallaxtec.com/api/ml/order-metadata"
payload = {}
headers = {
'Accept': 'application/json',
'X-Tenant': X_Tenant, #'royalexpress',
'Authorization': 'Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJhdWQiOiIxIiwianRpIjoiZWQzYjVkN2JkNTU5YmQxNWNmYzdiNThhM2UyZDlmNGEyMGQzMDFjMWY4ZWVlNDY2ZDBlZTAxYmMzZmVjMTU1ZWNjNzMxOWUxMGUxZGY3NDMiLCJpYXQiOjE3MDIyNzIyMDcuNjg0OTE2LCJuYmYiOjE3MDIyNzIyMDcuNjg0OTIzLCJleHAiOjE3MzM4OTQ2MDcuNjczNDYyLCJzdWIiOiIxIiwic2NvcGVzIjpbXX0.NFZvGO0GjoD7u3FRiIewRRoWu7ouUmKTKnCei8LMwQWzLntBLYcj_Bs21amjcHtzdbQNyCovHSDHJQaLJnD04kY1JRAdDC_OLi2YiZoSvnSJxNjWiuC4kwNE59Ndwu3o2iAzB-nd1EvyMnU_na7WxICRP8OegrpM-_q6M-wgnv7igaNeWjdxnXdtxbr-Zz7N2Xv2skWZwoDce37kWvH1tK7eqMK0uWqqyhBpli22CmkKPduHUNKMNOEnGTskeDaTuX5za2Lr8CNa34_FdKu3Y5CrFMGDBHT_UGALocpr80_38iifXm7WDl6ZIA1iYy6dBvCTeoC_aFo1X5FIrFbJgMCokW4VH0Q2ljm9ty0W7ATAiKrM1GIVFS5Dir4A1KI3LSeE459SqZpqsoJmaU95zSYbfnU_oZ9UpvW59nFgD6yJ8hGHyYnjhCS0jmxk3cq93T9X1rNWo2t0A3XYXgqZYnZrZpdrSbn-JVoX_NW1QC6RtmAGm7AtZ3GBrzxwu3m_7MicMI7Tu4W6d2WD9kZjq0khBUrm2DVZJzN2BRmH-a7JkAqJ0icpHQ_2Tc6T-95axebp6QEmHHXBKILNNwWxucZ0l-Ny0TuUivqn0m9gSJJDkA8ssWyBkzzJ9fUeRmJGbUFTeemPhMrF3_cvTUZ0J7IC2CK7qWePcHPQ-sy0is4'
}
        # Work out how many 200-order pages are needed (with a small buffer)
        count = requests.request("GET", url, headers=headers).json()["data"]["order_count"] // 200
        count = count + 2
        print(count)
        # Loop through every page of the order list
        for page in range(1, count):
try:
# Update the payload for each page
url = "https://dev3.api.curfox.parallaxtec.com/api/ml/order-list?sort=id&paginate=200&page="+str(page)
payload = {}
headers = {
'Accept': 'application/json',
                'X-Tenant': X_Tenant,
}
response = requests.request("GET", url, headers=headers)
                # Parse the page response
json_response = response.json()
# Extracting 'data' for conversion
data = json_response['data']
df = pd.json_normalize(data)
# Concatenate the current page's DataFrame with the combined DataFrame
combined_df = pd.concat([combined_df, df], ignore_index=True)
            except Exception:
                # Ran past the last page (or a request failed); nothing to add for this page
                print("data over")
print("data collected....")
data = combined_df[combined_df['status.name'].isin(['RETURN TO CLIENT', 'DELIVERED'])]
data = data[['delivery_possibility','status.name']]
data = data[data['delivery_possibility'].between(0, 100)]
return data
#await asyncio.sleep(43200) # Adjust the sleep interval as needed
# # Create a startup event.
# @app.on_event("startup")
# async def startup_event():
# # Start the continuous function as a background task.
# asyncio.create_task(your_continuous_function())
async def fetch_data(session, url, headers):
async with session.get(url, headers=headers) as response:
return await response.json()
async def your_continuous_function(X_Tenant):
combined_df = pd.DataFrame()
base_url = "https://dev3.api.curfox.parallaxtec.com/api/ml/order-list"
headers = {
'Accept': 'application/json',
'X-Tenant': X_Tenant,
'Authorization': 'Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJhdWQiOiIxIiwianRpIjoiZWQzYjVkN2JkNTU5YmQxNWNmYzdiNThhM2UyZDlmNGEyMGQzMDFjMWY4ZWVlNDY2ZDBlZTAxYmMzZmVjMTU1ZWNjNzMxOWUxMGUxZGY3NDMiLCJpYXQiOjE3MDIyNzIyMDcuNjg0OTE2LCJuYmYiOjE3MDIyNzIyMDcuNjg0OTIzLCJleHAiOjE3MzM4OTQ2MDcuNjczNDYyLCJzdWIiOiIxIiwic2NvcGVzIjpbXX0.NFZvGO0GjoD7u3FRiIewRRoWu7ouUmKTKnCei8LMwQWzLntBLYcj_Bs21amjcHtzdbQNyCovHSDHJQaLJnD04kY1JRAdDC_OLi2YiZoSvnSJxNjWiuC4kwNE59Ndwu3o2iAzB-nd1EvyMnU_na7WxICRP8OegrpM-_q6M-wgnv7igaNeWjdxnXdtxbr-Zz7N2Xv2skWZwoDce37kWvH1tK7eqMK0uWqqyhBpli22CmkKPduHUNKMNOEnGTskeDaTuX5za2Lr8CNa34_FdKu3Y5CrFMGDBHT_UGALocpr80_38iifXm7WDl6ZIA1iYy6dBvCTeoC_aFo1X5FIrFbJgMCokW4VH0Q2ljm9ty0W7ATAiKrM1GIVFS5Dir4A1KI3LSeE459SqZpqsoJmaU95zSYbfnU_oZ9UpvW59nFgD6yJ8hGHyYnjhCS0jmxk3cq93T9X1rNWo2t0A3XYXgqZYnZrZpdrSbn-JVoX_NW1QC6RtmAGm7AtZ3GBrzxwu3m_7MicMI7Tu4W6d2WD9kZjq0khBUrm2DVZJzN2BRmH-a7JkAqJ0icpHQ_2Tc6T-95axebp6QEmHHXBKILNNwWxucZ0l-Ny0TuUivqn0m9gSJJDkA8ssWyBkzzJ9fUeRmJGbUFTeemPhMrF3_cvTUZ0J7IC2CK7qWePcHPQ-sy0is4'
}
async with aiohttp.ClientSession() as session:
# Fetch the order count to determine the number of pages
order_metadata_url = "https://dev3.api.curfox.parallaxtec.com/api/ml/order-metadata"
metadata_response = await fetch_data(session, order_metadata_url, headers)
        page_count = metadata_response["data"]["order_count"] // 200 + 2
        # Fire one request per page concurrently instead of using a hard-coded page range
        tasks = []
        for page in range(1, page_count):
page_url = f"{base_url}?sort=id&paginate=200&page={page}"
tasks.append(fetch_data(session, page_url, headers))
responses = await asyncio.gather(*tasks)
for json_response in responses:
data = json_response.get('data', [])
if data:
df = pd.json_normalize(data)
combined_df = pd.concat([combined_df, df], ignore_index=True)
    # Keep only delivered / returned orders with a valid delivery_possibility
    # score and return the reduced DataFrame to the caller.
print("data collected....")
data = combined_df[combined_df['status.name'].isin(['RETURN TO CLIENT', 'DELIVERED'])]
data = data[['delivery_possibility','status.name']]
data = data[data['delivery_possibility'].between(0, 100)]
return data
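
# A minimal usage sketch for exercising the fetcher outside FastAPI, assuming the
# dev3 API is reachable and the hard-coded bearer token above is still valid;
# "royalexpress" is the tenant slug already used elsewhere in this file.
#
#     df = asyncio.run(your_continuous_function("royalexpress"))
#     print(df['status.name'].value_counts())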
@app.get("/kpi_results")
async def read_root(X_Tenant: str):
    # Must be async so the aiohttp-based fetcher can be awaited
    data = await your_continuous_function(X_Tenant)
status_counts_more_than_80 = data[data['delivery_possibility'] > 80]['status.name'].value_counts()
status_counts_50_to_80 = data[(data['delivery_possibility'] >= 50) & (data['delivery_possibility'] <= 80)]['status.name'].value_counts()
status_counts_30_to_49 = data[(data['delivery_possibility'] >= 30) & (data['delivery_possibility'] <= 49)]['status.name'].value_counts()
status_counts_below_30 = data[data['delivery_possibility'] < 30]['status.name'].value_counts()
print(status_counts_more_than_80,status_counts_50_to_80,status_counts_30_to_49,status_counts_below_30)
    # Map the counts to labels explicitly: value_counts() is sorted by frequency,
    # so positional indexing does not guarantee which status lands in [0] or [1].
    # "Correct" is taken to mean the outcome the score predicts: DELIVERED for the
    # high delivery_possibility buckets, RETURN TO CLIENT for the low ones.
    status_counts_more_than_80_0 = int(status_counts_more_than_80.get('DELIVERED', 0))
    status_counts_more_than_80_1 = int(status_counts_more_than_80.get('RETURN TO CLIENT', 0))
    status_counts_50_to_80_0 = int(status_counts_50_to_80.get('DELIVERED', 0))
    status_counts_50_to_80_1 = int(status_counts_50_to_80.get('RETURN TO CLIENT', 0))
    status_counts_30_to_49_0 = int(status_counts_30_to_49.get('RETURN TO CLIENT', 0))
    status_counts_30_to_49_1 = int(status_counts_30_to_49.get('DELIVERED', 0))
    status_counts_below_30_0 = int(status_counts_below_30.get('RETURN TO CLIENT', 0))
    status_counts_below_30_1 = int(status_counts_below_30.get('DELIVERED', 0))
kpi_result = {
"kpi_result": {
"status_counts_more_than_80": {
"correct_values": status_counts_more_than_80_0,
"incorrect_values": status_counts_more_than_80_1
},
"status_counts_50_to_80": {
"correct_values": status_counts_50_to_80_0,
"incorrect_values": status_counts_50_to_80_1
},
"status_counts_30_to_49": {
"correct_values": status_counts_30_to_49_0,
"incorrect_values": status_counts_30_to_49_1
},
"status_counts_below_30": {
"correct_values": status_counts_below_30_0,
"incorrect_values": status_counts_below_30_1
}
}
}
return kpi_result
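
# Minimal local entry point, a sketch assuming uvicorn is installed (standard for
# FastAPI apps); the module name "main", host, and port 7860 (the usual Hugging
# Face Spaces port) are assumptions, not part of the original deployment config.
if __name__ == "__main__":
    import uvicorn

    # Once running, the KPI endpoint can be exercised with e.g.:
    #   curl "http://localhost:7860/kpi_results?X_Tenant=royalexpress"
    uvicorn.run(app, host="0.0.0.0", port=7860)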