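"""FastAPI service that pulls paginated order data from the Curfox API and
reports how predicted delivery-possibility buckets line up with the final
order status (DELIVERED vs RETURN TO CLIENT)."""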
import asyncio

import aiohttp
import pandas as pd
import requests
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI()
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
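# NOTE: CORS is wide open ("*" for origins, methods, and headers), which is
# convenient for development; a production deployment would normally pin
# allow_origins to the known frontend hosts.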
# Legacy synchronous fetcher, kept for reference; it was originally meant to
# run forever as a background task (see the commented-out startup hook below)
# and is superseded by the async implementation further down.
def your_continuous_function_old(X_Tenant):
    while True:
        print("data fetcher running.....")
        # Accumulate every page of results into one DataFrame.
        combined_df = pd.DataFrame()
        url = "https://dev3.api.curfox.parallaxtec.com/api/ml/order-metadata"
        headers = {
            'Accept': 'application/json',
            'X-Tenant': X_Tenant,
            # NOTE: a long-lived bearer token is hard-coded here; in practice it
            # would be loaded from configuration or a secrets store.
            'Authorization': 'Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJhdWQiOiIxIiwianRpIjoiZWQzYjVkN2JkNTU5YmQxNWNmYzdiNThhM2UyZDlmNGEyMGQzMDFjMWY4ZWVlNDY2ZDBlZTAxYmMzZmVjMTU1ZWNjNzMxOWUxMGUxZGY3NDMiLCJpYXQiOjE3MDIyNzIyMDcuNjg0OTE2LCJuYmYiOjE3MDIyNzIyMDcuNjg0OTIzLCJleHAiOjE3MzM4OTQ2MDcuNjczNDYyLCJzdWIiOiIxIiwic2NvcGVzIjpbXX0.NFZvGO0GjoD7u3FRiIewRRoWu7ouUmKTKnCei8LMwQWzLntBLYcj_Bs21amjcHtzdbQNyCovHSDHJQaLJnD04kY1JRAdDC_OLi2YiZoSvnSJxNjWiuC4kwNE59Ndwu3o2iAzB-nd1EvyMnU_na7WxICRP8OegrpM-_q6M-wgnv7igaNeWjdxnXdtxbr-Zz7N2Xv2skWZwoDce37kWvH1tK7eqMK0uWqqyhBpli22CmkKPduHUNKMNOEnGTskeDaTuX5za2Lr8CNa34_FdKu3Y5CrFMGDBHT_UGALocpr80_38iifXm7WDl6ZIA1iYy6dBvCTeoC_aFo1X5FIrFbJgMCokW4VH0Q2ljm9ty0W7ATAiKrM1GIVFS5Dir4A1KI3LSeE459SqZpqsoJmaU95zSYbfnU_oZ9UpvW59nFgD6yJ8hGHyYnjhCS0jmxk3cq93T9X1rNWo2t0A3XYXgqZYnZrZpdrSbn-JVoX_NW1QC6RtmAGm7AtZ3GBrzxwu3m_7MicMI7Tu4W6d2WD9kZjq0khBUrm2DVZJzN2BRmH-a7JkAqJ0icpHQ_2Tc6T-95axebp6QEmHHXBKILNNwWxucZ0l-Ny0TuUivqn0m9gSJJDkA8ssWyBkzzJ9fUeRmJGbUFTeemPhMrF3_cvTUZ0J7IC2CK7qWePcHPQ-sy0is4'
        }
        # Pages needed at 200 orders per page, plus a safety margin of two;
        # note that the loop below still uses a fixed upper bound instead.
        count = requests.request("GET", url, headers=headers).json()["data"]["order_count"] // 200
        count = count + 2
        print(count)
        # Fetch pages sequentially (the async endpoint below does this concurrently).
        for page in range(1, 30):
            try:
                url = "https://dev3.api.curfox.parallaxtec.com/api/ml/order-list?sort=id&paginate=200&page=" + str(page)
                headers = {
                    'Accept': 'application/json',
                    'X-Tenant': X_Tenant,
                }
                response = requests.request("GET", url, headers=headers)
                json_response = response.json()
                # Flatten the nested 'data' records and append this page.
                data = json_response['data']
                df = pd.json_normalize(data)
                combined_df = pd.concat([combined_df, df], ignore_index=True)
            except Exception:
                # Past the last page the response carries no usable 'data'.
                print("data over")
print("data collected....")
data = combined_df[combined_df['status.name'].isin(['RETURN TO CLIENT', 'DELIVERED'])]
data = data[['delivery_possibility','status.name']]
data = data[data['delivery_possibility'].between(0, 100)]
return data
#await asyncio.sleep(43200) # Adjust the sleep interval as needed
# # Startup hook that launched the fetcher as a background task.
# @app.on_event("startup")
# async def startup_event():
#     asyncio.create_task(your_continuous_function())
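# Async fetcher: one coroutine per page, run concurrently via asyncio.gather,
# replacing the sequential requests loop above.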
async def fetch_page(session, page, X_Tenant):
    """Fetch a single page of orders and return it as a flat DataFrame."""
    try:
        url = f"https://dev3.api.curfox.parallaxtec.com/api/ml/order-list?sort=id&paginate=200&page={page}"
        headers = {
            'Accept': 'application/json',
            'X-Tenant': X_Tenant,
        }
        async with session.get(url, headers=headers) as response:
            json_response = await response.json()
            return pd.json_normalize(json_response['data'])
    except Exception as e:
        print(f"Failed to fetch data for page {page}: {e}")
        return pd.DataFrame()  # Empty frame so pd.concat still works downstream
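# Design note: fetch_page swallows per-page failures and returns an empty
# frame, so one bad page cannot fail the whole gather; the trade-off is that
# missing pages silently shrink the KPI sample.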
@app.get("/kpi_results")
async def read_root(X_Tenant):
    # Fetch all pages concurrently and combine them into one DataFrame.
    async with aiohttp.ClientSession() as session:
        tasks = [fetch_page(session, page, X_Tenant) for page in range(1, 30)]
        results = await asyncio.gather(*tasks)
    combined_df = pd.concat(results, ignore_index=True)
    print("Data collected....")
    # Keep only terminal statuses with a delivery_possibility score in [0, 100].
    data = combined_df[combined_df['status.name'].isin(['RETURN TO CLIENT', 'DELIVERED'])]
    data = data[['delivery_possibility', 'status.name']]
    data = data[data['delivery_possibility'].between(0, 100)]
    # Count final statuses within each predicted delivery-possibility bucket.
    status_counts_more_than_80 = data[data['delivery_possibility'] > 80]['status.name'].value_counts()
    status_counts_50_to_80 = data[data['delivery_possibility'].between(50, 80)]['status.name'].value_counts()
    status_counts_30_to_49 = data[data['delivery_possibility'].between(30, 49)]['status.name'].value_counts()
    status_counts_below_30 = data[data['delivery_possibility'] < 30]['status.name'].value_counts()
    print(status_counts_more_than_80, status_counts_50_to_80, status_counts_30_to_49, status_counts_below_30)

    def count_at(counts, position):
        # value_counts() sorts by frequency, so position 0 is the majority
        # status in the bucket (not necessarily DELIVERED); absent positions
        # count as 0.
        try:
            return int(counts.iloc[position])
        except IndexError:
            return 0
    kpi_result = {
        "kpi_result": {
            "status_counts_more_than_80": {
                "correct_values": count_at(status_counts_more_than_80, 0),
                "incorrect_values": count_at(status_counts_more_than_80, 1)
            },
            "status_counts_50_to_80": {
                "correct_values": count_at(status_counts_50_to_80, 0),
                "incorrect_values": count_at(status_counts_50_to_80, 1)
            },
            "status_counts_30_to_49": {
                "correct_values": count_at(status_counts_30_to_49, 0),
                "incorrect_values": count_at(status_counts_30_to_49, 1)
            },
            "status_counts_below_30": {
                "correct_values": count_at(status_counts_below_30, 0),
                "incorrect_values": count_at(status_counts_below_30, 1)
            }
        }
    }
    return kpi_result
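# A minimal sketch of how to run and query this service locally, assuming the
# file is saved as main.py (the filename is an assumption):
#
#   uvicorn main:app --host 0.0.0.0 --port 8000
#   curl "http://localhost:8000/kpi_results?X_Tenant=royalexpress"
#
# The response is the nested kpi_result JSON built above.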