import asyncio

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

import aiohttp
import pandas as pd
import requests

app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
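
# The CORS policy above accepts any origin, method, and header.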

# Legacy synchronous data fetcher, kept for reference; the original intent was a
# continuous async function (see the commented-out pieces below).
# async def your_continuous_function():
def your_continuous_function_old(X_Tenant):
    while True:
        print("data fetcher running.....")
        # Initialize an empty DataFrame to store the combined data
        combined_df = pd.DataFrame()

        # Ask the metadata endpoint how many orders exist, to estimate the page count.
        url = "https://dev3.api.curfox.parallaxtec.com/api/ml/order-metadata"
        headers = {
            'Accept': 'application/json',
            'X-Tenant': X_Tenant,  # e.g. 'royalexpress'
            'Authorization': 'Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJhdWQiOiIxIiwianRpIjoiZWQzYjVkN2JkNTU5YmQxNWNmYzdiNThhM2UyZDlmNGEyMGQzMDFjMWY4ZWVlNDY2ZDBlZTAxYmMzZmVjMTU1ZWNjNzMxOWUxMGUxZGY3NDMiLCJpYXQiOjE3MDIyNzIyMDcuNjg0OTE2LCJuYmYiOjE3MDIyNzIyMDcuNjg0OTIzLCJleHAiOjE3MzM4OTQ2MDcuNjczNDYyLCJzdWIiOiIxIiwic2NvcGVzIjpbXX0.NFZvGO0GjoD7u3FRiIewRRoWu7ouUmKTKnCei8LMwQWzLntBLYcj_Bs21amjcHtzdbQNyCovHSDHJQaLJnD04kY1JRAdDC_OLi2YiZoSvnSJxNjWiuC4kwNE59Ndwu3o2iAzB-nd1EvyMnU_na7WxICRP8OegrpM-_q6M-wgnv7igaNeWjdxnXdtxbr-Zz7N2Xv2skWZwoDce37kWvH1tK7eqMK0uWqqyhBpli22CmkKPduHUNKMNOEnGTskeDaTuX5za2Lr8CNa34_FdKu3Y5CrFMGDBHT_UGALocpr80_38iifXm7WDl6ZIA1iYy6dBvCTeoC_aFo1X5FIrFbJgMCokW4VH0Q2ljm9ty0W7ATAiKrM1GIVFS5Dir4A1KI3LSeE459SqZpqsoJmaU95zSYbfnU_oZ9UpvW59nFgD6yJ8hGHyYnjhCS0jmxk3cq93T9X1rNWo2t0A3XYXgqZYnZrZpdrSbn-JVoX_NW1QC6RtmAGm7AtZ3GBrzxwu3m_7MicMI7Tu4W6d2WD9kZjq0khBUrm2DVZJzN2BRmH-a7JkAqJ0icpHQ_2Tc6T-95axebp6QEmHHXBKILNNwWxucZ0l-Ny0TuUivqn0m9gSJJDkA8ssWyBkzzJ9fUeRmJGbUFTeemPhMrF3_cvTUZ0J7IC2CK7qWePcHPQ-sy0is4'
        }
        count = requests.request("GET", url, headers=headers).json()["data"]["order_count"] // 200
        count = count + 2
        print(count)

        # Loop through the result pages (the page range is hard-coded; `count` is unused).
        for page in range(1, 30):
            try:
                url = "https://dev3.api.curfox.parallaxtec.com/api/ml/order-list?sort=id&paginate=200&page=" + str(page)
                headers = {
                    'Accept': 'application/json',
                    'X-Tenant': X_Tenant,
                }
                response = requests.request("GET", url, headers=headers)
                json_response = response.json()
                # Extracting 'data' for conversion
                data = json_response['data']
                df = pd.json_normalize(data)
                # Concatenate the current page's DataFrame with the combined DataFrame
                combined_df = pd.concat([combined_df, df], ignore_index=True)
            except Exception:
                print("data over")

        print("data collected....")
        data = combined_df[combined_df['status.name'].isin(['RETURN TO CLIENT', 'DELIVERED'])]
        data = data[['delivery_possibility', 'status.name']]
        data = data[data['delivery_possibility'].between(0, 100)]
        return data
        # await asyncio.sleep(43200)  # Adjust the sleep interval as needed

# # Create a startup event.
# @app.on_event("startup")
# async def startup_event():
#     # Start the continuous function as a background task.
#     asyncio.create_task(your_continuous_function())
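
# The async pipeline below replaces the sequential fetcher above: fetch_page downloads
# a single 200-row page, and read_root gathers all pages concurrently before computing
# the KPI summary of predicted delivery possibility versus actual order status.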
async def fetch_page(session, page, X_Tenant):
    try:
        url = f"https://dev3.api.curfox.parallaxtec.com/api/ml/order-list?sort=id&paginate=200&page={page}"
        headers = {
            'Accept': 'application/json',
            'X-Tenant': X_Tenant,  # e.g. 'royalexpress'
        }
        async with session.get(url, headers=headers) as response:
            json_response = await response.json()
            data = json_response['data']
            df = pd.json_normalize(data)
            return df
    except Exception as e:
        print(f"Failed to fetch data for page {page}: {e}")
        return pd.DataFrame()  # Return an empty DataFrame in case of error
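
# NOTE (assumption): no route registration appears in this snippet; given the FastAPI
# app and CORS setup above, read_root is presumably meant to be exposed as an endpoint.
# The path below is a guess, with X_Tenant arriving as a query parameter.
@app.get("/")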
async def read_root(X_Tenant):
    combined_df = pd.DataFrame()

    async with aiohttp.ClientSession() as session:
        tasks = [fetch_page(session, page, X_Tenant) for page in range(1, 30)]
        results = await asyncio.gather(*tasks)

        # Combine all the DataFrames from each page
        combined_df = pd.concat(results, ignore_index=True)

    print("Data collected....")

    # Filter the data
    filtered_data = combined_df[combined_df['status.name'].isin(['RETURN TO CLIENT', 'DELIVERED'])]
    filtered_data = filtered_data[['delivery_possibility', 'status.name']]
    filtered_data = filtered_data[filtered_data['delivery_possibility'].between(0, 100)]
    # existing code===========================
    data = filtered_data

    # Count the outcome statuses in each predicted delivery-possibility bucket.
    status_counts_more_than_80 = data[data['delivery_possibility'] > 80]['status.name'].value_counts()
    status_counts_50_to_80 = data[(data['delivery_possibility'] >= 50) & (data['delivery_possibility'] <= 80)]['status.name'].value_counts()
    status_counts_30_to_49 = data[(data['delivery_possibility'] >= 30) & (data['delivery_possibility'] <= 49)]['status.name'].value_counts()
    status_counts_below_30 = data[data['delivery_possibility'] < 30]['status.name'].value_counts()
    print(status_counts_more_than_80, status_counts_50_to_80, status_counts_30_to_49, status_counts_below_30)

    def nth_count(counts, i):
        # Return the i-th entry of a value_counts() Series (most frequent first),
        # or 0 when that position does not exist.
        return int(counts.iloc[i]) if len(counts) > i else 0

    status_counts_more_than_80_0 = nth_count(status_counts_more_than_80, 0)
    status_counts_more_than_80_1 = nth_count(status_counts_more_than_80, 1)
    status_counts_50_to_80_0 = nth_count(status_counts_50_to_80, 0)
    status_counts_50_to_80_1 = nth_count(status_counts_50_to_80, 1)
    status_counts_30_to_49_0 = nth_count(status_counts_30_to_49, 0)
    status_counts_30_to_49_1 = nth_count(status_counts_30_to_49, 1)
    status_counts_below_30_0 = nth_count(status_counts_below_30, 0)
    status_counts_below_30_1 = nth_count(status_counts_below_30, 1)
    kpi_result = {
        "kpi_result": {
            "status_counts_more_than_80": {
                "correct_values": status_counts_more_than_80_0,
                "incorrect_values": status_counts_more_than_80_1
            },
            "status_counts_50_to_80": {
                "correct_values": status_counts_50_to_80_0,
                "incorrect_values": status_counts_50_to_80_1
            },
            "status_counts_30_to_49": {
                "correct_values": status_counts_30_to_49_0,
                "incorrect_values": status_counts_30_to_49_1
            },
            "status_counts_below_30": {
                "correct_values": status_counts_below_30_0,
                "incorrect_values": status_counts_below_30_1
            }
        }
    }
    return kpi_result
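

# Minimal local-run sketch (assumption): the hosting platform normally launches the
# app itself; the port here is only the common Hugging Face Spaces default.
if __name__ == "__main__":
    import uvicorn

    # Serve the FastAPI app defined above.
    uvicorn.run(app, host="0.0.0.0", port=7860)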