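"""FastAPI service that forecasts next-month product sales counts per business.

Transaction data is pulled via data_collector, a monthly Prophet forecast is
generated per product in parallel, and the results are cached as JSON in the
sm_product_count_forecast MySQL table for paginated retrieval and search.
"""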
from fastapi import FastAPI, HTTPException, Query
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware
import data_collector as dc
import pandas as pd
from prophet import Prophet
import math
import json
import mysql.connector
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime

app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


# Define connection parameters
host = "68.183.225.237"
user = "sm_ml"
password = "Fz6/I733"
database = "sm_qa_1"

def insert_data(b_id, forecast_data):  # requires the mysql-connector-python package
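    """Serialize forecast_data to JSON and insert it into sm_product_count_forecast for the given business id."""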
    # Establish connection
    connection = mysql.connector.connect(
        host=host,
        user=user,
        password=password,
        database=database
    )
    # Create a cursor object
    cursor = connection.cursor()
    # Convert forecast_data to JSON string
    forecast_data_json = json.dumps(forecast_data)
    
    # SQL command to insert data
    insert_query = """
    INSERT INTO sm_product_count_forecast (bid, forecast_data)
    VALUES (%s, %s)
    """
    
    # Execute the SQL command with data
    cursor.execute(insert_query, (b_id, forecast_data_json))
    
    # Commit the transaction
    connection.commit()
    
    print("Data inserted successfully")
    
    # Close the cursor and connection
    cursor.close()
    connection.close()


def delete_json(b_id):
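    """Delete the cached forecast record for the given business id from sm_product_count_forecast."""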
    # Establish connection
    connection = mysql.connector.connect(
        host=host,
        user=user,
        password=password,
        database=database
    )
    # Create a cursor object
    cursor = connection.cursor()
    # SQL command to delete a specific record
    delete_query = """
    DELETE FROM sm_product_count_forecast
    WHERE bid = %s
    """
    
    # Execute the SQL command with the specified BID
    cursor.execute(delete_query, (b_id,))
    # Commit the transaction
    connection.commit()
    print(f"Record with BID {b_id} deleted successfully")
    # Close the cursor and connection
    cursor.close()
    connection.close()

def get_data(b_id, page_number, page_size, search):
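    """Load the cached forecast for a business id from sm_product_count_forecast.

    When search is False a paginated dict is returned; when search is True the
    full forecast list is returned for in-memory searching. If no forecast has
    been generated yet, a prompt string is returned instead.
    """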
    start = (page_number - 1) * page_size
    end = start + page_size

    # Establish connection
    connection = mysql.connector.connect(
        host=host,
        user=user,
        password=password,
        database=database
    )
    
    # Create a cursor object
    cursor = connection.cursor()
    
    # SQL command to select data for a specific BID
    select_query = """
    SELECT bid, forecast_data, created_at
    FROM sm_product_count_forecast
    WHERE bid = %s
    """
    
    # Execute the SQL command with the specified BID
    cursor.execute(select_query, (b_id,))
    
    # Fetch the result
    row = cursor.fetchone()

    # Close the cursor and connection before handling the result
    # (the original close calls sat after the return statements and never ran)
    cursor.close()
    connection.close()

    if row:
        bid = row[0]
        forecast_data_json = row[1]
        created_at = row[2]

        # Convert the JSON string back to a Python list of records
        forecast_data = json.loads(forecast_data_json)

        if not search:
            return {
                "BID": bid,
                "created_at": created_at,
                "forecast_data": forecast_data[start:end]
            }
        else:
            return forecast_data

    else:
        return f"Please generate a forecast for this business id: {b_id}"


def forecast(monthly_sales):
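    """Fit a Prophet model on the monthly sales series and forecast the next month.

    Returns the history combined with the forecasted month, the raw forecasted
    row, and the forecasted row with yhat rounded up and clamped at zero.
    """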
    # Prepare the data for Prophet
    monthly_sales.rename(columns={'transaction_date': 'ds', 'sell_qty': 'y'}, inplace=True)

    # Initialize and fit the Prophet model
    model = Prophet()
    model.fit(monthly_sales)

    # Make a future dataframe for the next month
    future = model.make_future_dataframe(periods=1, freq='M')
    forecast = model.predict(future)

    # Extract the forecasted sales for the next month
    forecasted_sales = forecast[['ds', 'yhat']].tail(2)

    # Combine historical and forecasted data
    combined_sales = pd.concat([monthly_sales, forecasted_sales[-1:]], ignore_index=True)
    original_forecasted_value = combined_sales.tail(1)
    rounded_value = combined_sales.tail(1).copy()

    # Round the forecast up to a whole unit and clamp negative values to zero
    rounded_value['yhat'] = rounded_value['yhat'].apply(lambda x: max(0, math.ceil(x)))

    return combined_sales, original_forecasted_value, rounded_value

def process_product(product_name, data):
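    """Build the next-month forecast record for a single product.

    A zero-quantity transaction dated today is appended so the monthly resample
    extends to the current month before forecasting. If anything fails, the
    record is returned with the error message in place of a prediction.
    """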
    full_trend = ""
    brand_name = data['brand_name'].tolist()[0]
    product_image = data['product_image'].tolist()[0]
    sub_sku = data['sub_sku'].tolist()[0]
    try:
        # Get today's date
        today = datetime.today().strftime('%Y-%m-%d %H:%M:%S')
    
        # Create a new fake transaction with today's date and selling count 0
        fake_transaction = data.iloc[0].copy()
        fake_transaction['transaction_date'] = today
        fake_transaction['sell_qty'] = 0
    
        # Convert fake_transaction to a DataFrame
        fake_transaction_df = pd.DataFrame([fake_transaction])
    
        # Concatenate the original DataFrame with the new fake transaction DataFrame
        data = pd.concat([data, fake_transaction_df], ignore_index=True)
        
        
        # Summarize the sales count per month
        data['transaction_date'] = pd.to_datetime(data['transaction_date'])
        data.set_index('transaction_date', inplace=True)
        monthly_sales = data['sell_qty'].resample('M').sum().reset_index()

        full_trend, forecasted_value, rounded_value = forecast(monthly_sales)
        rounded_value.columns = ["next_month", "y", "predicted_count"]
        # Convert to dictionary
        result_dict = rounded_value.to_dict(orient="records")[0]

        return {
            "sub_sku": sub_sku,
            "Product Name": product_name,
            "brand_name" : brand_name,
            "product_image" : f"http://app.storematepro.lk/img/{product_image}",
            "next_month": str(result_dict["next_month"]),
            "predicted_count": result_dict["predicted_count"]
            #"full_trend" : str(full_trend)
        }
    except Exception as e:
        return {
            "sub_sku": sub_sku,
            "Product Name": product_name,
            "brand_name" : brand_name,
            "product_image" : f"http://app.storematepro.lk/img/{product_image}",
            "next_month": str(e),
            "predicted_count": "not predicted"
            #"full_trend" : str(full_trend)
        }

@app.post("/generate_product_count_prediction")
async def generate_product_count_prediction(b_id: int):
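    """Forecast next-month sales counts for every product of a business and cache the results in MySQL."""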
    try:
        # Pull the raw sales transactions for every product of this business
        data, message = dc.get_data(b_id=b_id, product_name="sample")

        if message == "done":

            grouped_df = data.groupby('product_name')

            results = []
            with ThreadPoolExecutor() as executor:
                futures = [executor.submit(process_product, product_name, product_df.copy()) for product_name, product_df in grouped_df]
                for future in as_completed(futures):
                    results.append(future.result())

            # Replace any previously cached forecast for this business id
            try:
                delete_json(b_id)
            except Exception as e:
                print(f"Could not delete an existing forecast for b_id {b_id}: {e}")
            insert_data(b_id, results)
            return {"status": "success",
                    "b_id": b_id,
                    "message": "Prediction successful and saved to DB",
                    "status_code": 200
                    }
        else:
            # dc.get_data did not return "done"; surface its message instead of returning None
            return {
                "status": "error",
                "message": message,
                "data": None,
                "status_code": 500
            }

    except Exception as e:
        print(str(e))
        response_content = {
            "status": "error",
            "message": str(e),
            "data": None,
            "status_code":500
        }
        return response_content


@app.post("/get_product_count_prediction_from_DB")
async def get_product_count_prediction_from_DB(b_id: int, page_number: int, page_size: int):
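    """Return one page of the forecast cached by /generate_product_count_prediction."""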
    try:
        data = get_data(b_id, page_number, page_size, False)
    except Exception as e:
        data = str(e)
        print(str(e))
    response_content = {
        "status": "done",
        "message": "data from DB",
        "data": data,
        "status_code": 200
    }
    return response_content



def find_related_products(sub_sku=None, product_name=None, forecast_data=None):
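    """Filter forecast records by exact sub_sku or case-insensitive Product Name match."""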
    # Filter products based on the given sub_sku or Product Name
    filtered_products = [
        item for item in forecast_data
        if (sub_sku and item['sub_sku'] == sub_sku) or
           (product_name and item['Product Name'].lower() == product_name.lower())
    ]
    return filtered_products

@app.post("/search_results_from_DB")
async def search_results_from_DB(
    b_id: int,
    search_sku: str = Query(None, description="The SKU of the product to search"),
    product_name: str = Query(None, description="The name of the product to search")
):
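    """Search the cached forecast of a business by SKU or product name."""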
    try:
        related_products = find_related_products(
            sub_sku=search_sku,
            product_name=product_name,
            forecast_data=get_data(b_id, 1, 2, True)
        )
    except Exception as e:
        related_products = str(e)
        print(str(e))

    response_content = {
        "status": "done",
        "message": "data from DB",
        "data": related_products,
        "status_code": 200
    }
    return response_content
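

# To run the service locally (assuming this module is saved as main.py):
#   uvicorn main:app --host 0.0.0.0 --port 8000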