abdallah-03 committed
Commit: 5ff89f7 (verified)
Parent(s): 29a3e6a

Upload 8 files

Files changed (8)
  1. API_main.py +49 -0
  2. Amazon_scraper.py +122 -0
  3. Dockerfile +13 -0
  4. Jumia_scraper.py +85 -0
  5. main.py +40 -0
  6. parallel_execution.py +43 -0
  7. price_analysis.py +267 -0
  8. requirements.txt +0 -0
API_main.py ADDED
@@ -0,0 +1,49 @@
+ from fastapi import FastAPI, HTTPException
+ from fastapi.responses import JSONResponse
+ from fastapi.middleware.cors import CORSMiddleware
+ from pydantic import BaseModel
+
+ from price_analysis import market_price_estimation
+
+ app = FastAPI(title="Market Price Analysis API")
+
+ # Add CORS middleware
+ app.add_middleware(
+     CORSMiddleware,
+     allow_origins=["*"],
+     allow_credentials=True,
+     allow_methods=["*"],
+     allow_headers=["*"],
+ )
+
+
+ class MarketEstimation(BaseModel):
+     product_name: str
+     cost_price: int
+     user_price: int
+
+
+ @app.get("/")
+ async def root():
+     return {
+         "message": "Welcome to the Market Prices Estimation API!",
+         "version": "1.0",
+         "endpoints": {
+             "/": "This welcome message",
+             "/market-prices-estimation/": "POST endpoint for price analysis"
+         }
+     }
+
+
+ @app.post("/market-prices-estimation/")
+ async def market_prices_estimation_endpoint(request: MarketEstimation):
+     try:
+         response = market_price_estimation(request.product_name, request.cost_price, request.user_price)
+
+         if not isinstance(response, dict):
+             raise ValueError("market_price_estimation must return a dictionary")
+
+         return JSONResponse(status_code=200, content=response)
+
+     except Exception as e:
+         raise HTTPException(status_code=500, detail=str(e))
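A minimal sketch of how a client could exercise this endpoint, assuming the app is running locally on port 7860 (as in the Dockerfile below) and using the example values from main.py; the URL, port, and numbers are assumptions, not part of the commit:

import requests  # hypothetical client script, not one of the uploaded files

payload = {
    "product_name": "soundcore r50 nc",
    "cost_price": 1000,
    "user_price": 1500,
}
resp = requests.post("http://127.0.0.1:7860/market-prices-estimation/", json=payload, timeout=120)
resp.raise_for_status()
data = resp.json()  # {"products": [...], "recommendations": {...}}
print(data["recommendations"]["price_suggestion"])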
Amazon_scraper.py ADDED
@@ -0,0 +1,122 @@
+ import requests
+ from bs4 import BeautifulSoup
+ import re
+ import random
+ import time
+ from difflib import SequenceMatcher
+
+
+ def extract_prefix_and_number(text):
+     match = re.search(r'([A-Za-z]+)(\d+)', text)
+     if match:
+         return match.group(1), match.group(2)
+     return None, None  # No valid match
+
+
+ def similarity(a, b):
+     a_lower, b_lower = a.lower(), b.lower()
+     a_prefix, a_number = extract_prefix_and_number(a_lower)
+     b_prefix, b_number = extract_prefix_and_number(b_lower)
+     if not a_prefix or not b_prefix or not a_number or not b_number:
+         return 0
+     if (a_prefix != b_prefix) or (a_number != b_number):
+         return 0
+     if a_number not in b_lower:
+         return 0
+     base_similarity = SequenceMatcher(None, a_lower, b_lower).ratio()
+     return base_similarity
+
+
+ def parse_amazon_page(content, product_name, your_cost):
+     soup = BeautifulSoup(content, 'html.parser')
+     price_digit_limit = len(f"{your_cost}")
+     product_prices = []
+     products = soup.findAll("div", attrs={"data-component-type": "s-search-result"})
+
+     for product in products[:20]:
+         title = product.find("h2", attrs={"class": "a-size-base-plus"})
+         if not title:
+             continue
+
+         spans = title.findAll("span")
+         for span in spans:
+             name = span.text.strip()
+             similarity_score = similarity(product_name, name)
+             if similarity_score >= 0.0:
+                 # Get product link
+                 product_link = ""
+                 link_tag = title.find_parent("a")
+                 if link_tag and 'href' in link_tag.attrs:
+                     product_link = "https://www.amazon.eg" + link_tag['href']
+
+                 # Get image link
+                 image_link = ""
+                 img_tag = product.find("img", attrs={"class": "s-image"})
+                 if img_tag and 'src' in img_tag.attrs:
+                     image_link = img_tag['src']
+
+                 price_tag = product.find("span", attrs={"class": "a-price-whole"})
+                 if price_tag:
+                     raw_price = price_tag.text.strip()
+                     numeric_price = re.sub(r"[^\d]", "", raw_price)
+
+                     if not numeric_price:
+                         continue
+
+                     integer_part = numeric_price.split('.')[0]
+                     if ((len(integer_part) == price_digit_limit) or (len(integer_part) == price_digit_limit + 1)) and (
+                             int(integer_part) > int(your_cost)):
+                         product_prices.append((name, numeric_price, product_link, image_link))
+
+     if not product_prices:
+         print("Warning: No valid prices found on Amazon.")
+
+     return product_prices
+
+
+ def scrape_amazon(product_name, your_cost, queue, max_retries=3, retry_delay=3):
+     url = f"https://www.amazon.eg/s?k={product_name.replace(' ', '+')}&language=en"
+     print(f"Fetching: {url}")
+
+     headers = {
+         "User-Agent": random.choice([
+             "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.5481.77 Safari/537.36",
+             "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36",
+             "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.82 Safari/537.36"
+         ]),
+         "Accept-Language": "en-US,en;q=0.9",
+         "Referer": "https://www.google.com/",
+         "Accept-Encoding": "gzip, deflate, br",
+         "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
+         "Connection": "keep-alive",
+     }
+
+     for attempt in range(max_retries):
+         try:
+             response = requests.get(url, headers=headers, timeout=10)
+
+             if response.status_code in [506, 503]:
+                 print(
+                     f"Error {response.status_code}. Retrying in {retry_delay} seconds... (Attempt {attempt + 1}/{max_retries})")
+                 time.sleep(retry_delay)
+                 continue
+
+             if response.status_code == 200:
+                 print("Page fetched successfully with status code: 200")
+
+                 results = parse_amazon_page(response.content, product_name, your_cost)
+                 queue.put(("amazon", results))
+
+                 print("Amazon results sent to queue")  # Fix: Now this line runs
+                 return results  # Fix: Ensures function exits properly
+
+             else:
+                 print(f"Unexpected status code: {response.status_code}")
+                 queue.put(("amazon", []))  # keep the (site, results) shape the consumer unpacks
+
+         except requests.exceptions.RequestException as e:
+             print(f"An error occurred: {e}. Retrying in {retry_delay} seconds... (Attempt {attempt + 1}/{max_retries})")
+             time.sleep(retry_delay)
+
+     print("Failed to fetch Amazon data after multiple attempts.")
+     queue.put(("amazon", []))  # Ensure the queue gets an empty result if all retries fail
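To see what the name matching above actually does, here is a small sketch (the product strings are illustrative): extract_prefix_and_number keys on the first letters-then-digits token, and similarity returns 0 unless that token matches in both names. Note that parse_amazon_page compares the score against 0.0, which every candidate passes, so the similarity filter is effectively disabled as committed.

# Hypothetical interactive check of the matching helpers
from Amazon_scraper import extract_prefix_and_number, similarity

print(extract_prefix_and_number("soundcore r50 nc"))             # ('r', '50')
print(similarity("soundcore r50 nc", "soundcore p40i"))          # 0: model token differs
print(similarity("soundcore r50 nc", "soundcore r50i nc") > 0)   # True: same 'r50' token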
Dockerfile ADDED
@@ -0,0 +1,13 @@
+ FROM python:3.9-slim
+
+ WORKDIR /code
+
+ COPY ./requirements.txt /code/requirements.txt
+
+ RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+ COPY . /code/
+
+ EXPOSE 7860
+
+ CMD ["uvicorn", "API_main:app", "--host", "0.0.0.0", "--port", "7860"]
Jumia_scraper.py ADDED
@@ -0,0 +1,85 @@
+ import requests
+ from bs4 import BeautifulSoup
+ import re
+ import random
+ import time
+ from Amazon_scraper import similarity
+ import multiprocessing
+
+
+ def parse_jumia_page(content, product_name, your_cost):
+     soup = BeautifulSoup(content, 'html.parser')
+     price_digit_limit = len(f"{your_cost}")
+     product_prices = []
+     articles = soup.findAll("article", attrs={"class": "prd"})
+
+     for article in articles[:20]:
+         title = article.find("h3", attrs={"class": "name"})
+         if not title:
+             continue
+
+         name = title.text.strip()
+         similarity_score = similarity(product_name, name)
+         if similarity_score >= 0.0:
+             # Get product link
+             product_link = ""
+             link_tag = article.find("a")
+             if link_tag and 'href' in link_tag.attrs:
+                 product_link = link_tag['href']
+
+             # Get image link with correct class name
+             image_link = ""
+             img_tag = article.find("img", attrs={"class": "img-c"})
+             if img_tag and 'data-src' in img_tag.attrs:
+                 image_link = img_tag['data-src']
+
+             price_tag = article.find("div", attrs={"class": "prc"})
+             if price_tag:
+                 raw_price = price_tag.text.strip()
+                 numeric_price = re.sub(r"[^\d.]", "", raw_price)
+                 numeric_price = numeric_price.split(".")[0]
+                 if price_digit_limit:
+                     if ((len(numeric_price) == price_digit_limit) or (
+                             len(numeric_price) == price_digit_limit + 1)) and (int(numeric_price) > int(your_cost)):
+                         product_prices.append((name, numeric_price, product_link, image_link))
+                         continue
+
+     return product_prices
+
+ def scrape_jumia(product_name, your_cost, queue, max_retries=5, retry_delay=5):
+     url = f"https://www.jumia.com.eg/catalog/?q={product_name.replace(' ', '+')}"
+     # print(url)
+     headers = {
+         "User-Agent": random.choice([
+             "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.5481.77 Safari/537.36",
+             "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36",
+             "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.82 Safari/537.36"
+         ]),
+         "Accept-Language": "en-US,en;q=0.9"
+     }
+
+     for attempt in range(max_retries):
+         try:
+             response = requests.get(url, headers=headers, timeout=10)
+             if response.status_code in [506, 503]:
+                 print(
+                     f"Error {response.status_code}. Retrying in {retry_delay} seconds... (Attempt {attempt + 1}/{max_retries})")
+                 time.sleep(retry_delay)
+                 continue
+             if response.status_code == 200:
+                 print(f"Page fetched successfully with status code: {response.status_code}")
+                 # time.sleep(1.5)
+                 results = parse_jumia_page(response.content, product_name, your_cost)
+                 queue.put(("jumia", results))
+                 print("jumia results sent to queue")
+                 return results
+             else:
+                 print(f"Unexpected status code: {response.status_code}")
+                 queue.put(("jumia", []))  # keep the (site, results) shape the consumer unpacks
+         except requests.exceptions.RequestException as e:
+             print(f"An error occurred: {e}. Retrying in {retry_delay} seconds... (Attempt {attempt + 1}/{max_retries})")
+             time.sleep(retry_delay)
+     queue.put(("jumia", []))  # Empty result after retries failed
+
+
+
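A quick way to smoke-test this scraper on its own (a sketch, not part of the commit; it assumes network access and Jumia's current markup): pass a multiprocessing.Queue and read back the ("jumia", results) tuple, where each result is a (name, price, product_link, image_link) tuple.

import multiprocessing
from Jumia_scraper import scrape_jumia

if __name__ == "__main__":
    q = multiprocessing.Queue()
    scrape_jumia("soundcore r50 nc", 1000, q)  # example product and cost from main.py
    site, results = q.get()                    # ("jumia", [(name, price, link, image), ...])
    print(site, len(results))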
main.py ADDED
@@ -0,0 +1,40 @@
+ from parallel_execution import scrape_product_multiprocessing
+ from Amazon_scraper import scrape_amazon
+ from Jumia_scraper import scrape_jumia
+ import numpy as np
+ from price_analysis import market_price_estimation, get_products_list
+ from Amazon_scraper import scrape_amazon
+ from PIL import Image  # To open the saved image for preview
+
+ if __name__ == '__main__':
+     product_name = 'soundcore r50 nc'
+     cost_price = 1000
+     user_price = 1500
+
+     # Get prices from web scrapers
+     response = market_price_estimation(product_name, cost_price, user_price)
+     print(response)
+     # print("Scraped Prices:", prices)
+     # min_price, avg_price, max_price = get_MinMaxAverage(prices)
+     # Generate image
+     # image_buffer = plot_your_price(user_price, min_price,max_price,avg_price)
+
+     # if image_buffer is None:
+     #     print("Error: Could not generate price comparison plot.")
+     # else:
+     #     # Save to a file
+     #     with open("price_comparison.png", "wb") as f:
+     #         f.write(image_buffer.getvalue())
+     #
+     #     print("Image saved as 'price_comparison.png'")
+     #
+     #     # Open image for preview (optional)
+     #     img = Image.open("price_comparison.png")
+     #     img.show()
+
+     #
+     # recommendations = recommend_price(min_price,avg_price,max_price,user_price, cost_price,prices)
+     # for key, value in recommendations.items():
+     #     print(f"{key}: {value}")
+     # ans = get_prices_analysis(prices,cost_price,user_price)
+     # print(ans)
parallel_execution.py ADDED
@@ -0,0 +1,43 @@
+ import multiprocessing
+ from Jumia_scraper import scrape_jumia
+ from Amazon_scraper import scrape_amazon
+ import time
+
+ def scrape_product_multiprocessing(product_name, your_cost):
+     queue = multiprocessing.Queue()
+
+     # Create processes
+     p1 = multiprocessing.Process(target=scrape_amazon, args=(product_name, your_cost, queue))
+     p2 = multiprocessing.Process(target=scrape_jumia, args=(product_name, your_cost, queue))
+
+     # Start processes
+     p1.start()
+     p2.start()
+
+     # Wait for processes to complete
+     p1.join()
+     p2.join()
+
+     # Debugging: Check the queue size after processes finish
+     print(f"Queue size after both processes finish: {queue.qsize()}")
+
+     # Retrieve results from queue
+     results_amazon = []
+     results_jumia = []
+
+     # Check if queue has results
+     while not queue.empty():
+         try:
+
+             site, results = queue.get()
+             # print(f"Results from {site}: {results}")  # Debugging output
+             if site == 'amazon':
+                 results_amazon = results
+             elif site == 'jumia':
+                 results_jumia = results
+
+         except Exception as e:
+             print(e)
+
+     # Return results from both scrapers
+     return results_amazon, results_jumia
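A sketch of the intended call pattern (mirroring main.py); each returned list contains (name, price, product_link, image_link) tuples from one site. As a general multiprocessing.Queue caveat rather than an observed failure here: joining workers before draining the queue can block if a worker enqueues a very large result, so draining first is the usual pattern.

from parallel_execution import scrape_product_multiprocessing

if __name__ == "__main__":
    # example product and cost from main.py; real runs need network access
    amazon_results, jumia_results = scrape_product_multiprocessing("soundcore r50 nc", 1000)
    print(f"Amazon: {len(amazon_results)} listings, Jumia: {len(jumia_results)} listings")
    for name, price, link, image in amazon_results[:3]:
        print(price, name)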
price_analysis.py ADDED
@@ -0,0 +1,267 @@
+ from parallel_execution import scrape_product_multiprocessing
+ import numpy as np
+ import matplotlib.pyplot as plt
+ import io
+ from typing import List, Dict, Any
+ import json
+ import re
+
+ prices_list = []
+ # min_price=0
+ # avg_price=0
+ # max_price = 0
+ from typing import List, Any, Tuple
+
+
+ def loop_prices(source_list: List[List[Any]], source_name: str) -> Tuple[List[dict], List[int]]:
+     products = []
+     prices_list = []
+
+     for item in source_list:
+         try:
+             product_name = item[0]
+             price = int(item[1])
+             product_link = item[2] if len(item) > 2 else ""
+             image_link = item[3] if len(item) > 3 else ""
+
+             product = {
+                 "product_name": product_name,
+                 "source": source_name,
+                 "price": price,
+                 "product link": product_link,
+                 "image link": image_link
+             }
+             products.append(product)
+             prices_list.append(price)
+         except (ValueError, IndexError, TypeError):
+             print(f"Error: Skipping invalid price data from {source_name}")
+
+     return products, prices_list
+
+ def get_products_list(product_name: str, your_cost: float):
+     amazon, jumia = scrape_product_multiprocessing(product_name, your_cost)
+
+     products = []
+     all_prices = []
+
+     amazon_products, amazon_prices = loop_prices(amazon, "Amazon")
+     jumia_products, jumia_prices = loop_prices(jumia, "Jumia")
+
+     products.extend(amazon_products)
+     products.extend(jumia_products)
+
+     all_prices.extend(amazon_prices)
+     all_prices.extend(jumia_prices)
+
+     return products, all_prices
+
+ # def generate_prompt(product_name, your_cost):
+ #     products, _ = get_products_list(product_name, your_cost)
+ #
+ #     prompt = (f"Filter the given product list to include only closely related products."
+ #               f" The response should only be the filtered product list in valid JSON format, without any explanations or additional text."
+ #               f" Ensure the output is a properly formatted JSON array of dictionaries."
+ #               f" Real product name: {product_name}\nProduct list: {json.dumps(products, ensure_ascii=False, indent=2)}\n"
+ #               f"Example JSON output:\n"
+ #               f"["
+ #               f"{{\"product_name\": \"Soundcore R50i NC Wireless Bluetooth Headphones - Black\", \"source\": \"Amazon\", \"price\": 1150}},"
+ #               f"{{\"product_name\": \"Soundcore R50i NC True Wireless Earbuds 10mm Drivers with Big Bass, Bluetooth 5.3, 45H Playtime, IP54-Sweatguard Waterproof, AI Clear Calls with 4 Mics, 22 Preset EQs via App-Black\", \"source\": \"Amazon\", \"price\": 1390}},"
+ #               f"{{\"product_name\": \"Soundcore R50i NC True Wireless Earbuds 10mm Drivers with Big Bass, Bluetooth 5.3, 45H Playtime, IP54-Sweatguard Waterproof, AI Clear Calls with 4 Mics, 22 Preset EQs via App-White\", \"source\": \"Amazon\", \"price\": 1713}},"
+ #               f"{{\"product_name\": \"Soundcore R50i NC True Wireless Earbuds with Big Bass, Bluetooth 5.3, 45H Playtime, IP54-Sweatguard Waterproof, AI Clear Calls with 4 Mics, 22 Preset EQs via App-GREEN Local warranty\", \"source\": \"Amazon\", \"price\": 1550}}"
+ #               f"]"
+ #               )
+ #
+ #     return prompt
+
+ # def get_filtered_product_list(product_name, your_cost):
+ #     prompt = generate_prompt(product_name, your_cost)
+ #
+ #     genai.configure(api_key="AIzaSyAzp-WRPAi4IaALmpjyRh2yo0qsPmFMxdI")
+ #     model = genai.GenerativeModel("gemini-2.0-flash")
+ #     response = model.generate_content(prompt)
+ #
+ #     try:
+ #         # Parse response into JSON
+ #         filtered_products = json.loads(response.text)
+ #         if isinstance(filtered_products, list):
+ #             return filtered_products
+ #         else:
+ #             return []
+ #     except json.JSONDecodeError:
+ #         return []
+
+
+ # def extract_json(response_text):
+ #     match = re.search(r"\[.*\]", response_text, re.DOTALL)  # Extracts JSON part
+ #     if match:
+ #         return match.group(0)
+ #     return None
+ #
+ # def extract_prices(response_text):
+ #     json_text = extract_json(response_text)
+ #     if not json_text:
+ #         return "Invalid API response: No JSON found"
+ #
+ #     try:
+ #         products = json.loads(json_text)  # Convert JSON string to Python list
+ #         prices = [product["price"] for product in products if "price" in product]
+ #         return prices
+ #     except json.JSONDecodeError as e:
+ #         return f"Invalid JSON format: {e}"
+
+ def remove_outliers(prices, multiplier=1.0):
+     if not prices:
+         print("Warning: The prices list is empty. Returning an empty list.")
+         return []
+     try:
+         prices = list(map(int, prices))
+     except ValueError:
+         print("Error: Could not convert prices to integers. Check data format.")
+         return []
+
+     if len(prices) < 2:
+         print("Warning: Not enough data points to compute outliers.")
+         return prices
+
+     Q1 = np.percentile(prices, 25)
+     Q3 = np.percentile(prices, 75)
+     IQR = Q3 - Q1
+     lower_bound = Q1 - multiplier * IQR
+     upper_bound = Q3 + multiplier * IQR
+     return [price for price in prices if lower_bound <= price <= upper_bound]
+
+
+ def get_MinMaxAverage(updated_price_list):
+     filtered_prices = remove_outliers(updated_price_list)
+
+     if not filtered_prices:
+         print("Error: No valid prices available.")
+         return None, None, None
+
+     try:
+         filtered_prices = list(map(int, filtered_prices))
+         mini = np.min(list(map(int, updated_price_list)))
+         maxi = np.max(list(map(int, updated_price_list)))
+         average = round(np.mean(filtered_prices), 2)
+     except ValueError:
+         print("Error: Could not compute min/max/average due to invalid data.")
+         return None, None, None
+
+     return mini, average, maxi
+
+ def normalize(price, min_price, max_price):
+     if min_price is None or max_price is None:
+         print("Error: Cannot normalize due to missing price data.")
+         return np.pi / 2
+     if min_price == max_price:
+         return np.pi / 2
+     return np.pi - ((price - min_price) / (max_price - min_price) * np.pi)
+
+
+
+ def plot_your_price(your_price, min_price, max_price, avg_price):
+
+
+     if min_price is None or max_price is None or avg_price is None:
+         print("Error: Cannot plot due to missing price data.")
+         return None  # Return None if the image cannot be generated
+
+     fig, ax = plt.subplots(figsize=(8, 4), subplot_kw={'projection': 'polar'})
+
+     # Create the three segments (Min = Red, Mid = Yellow, Max = Green)
+     ax.barh(1, np.pi / 3, left=2 * np.pi / 3, color='red', height=0.5)
+     ax.barh(1, np.pi / 3, left=np.pi / 3, color='yellow', height=0.5)
+     ax.barh(1, np.pi / 3, left=0, color='green', height=0.5)
+
+     # Plot your price marker
+     norm_price = normalize(your_price, min_price, max_price)
+     ax.plot([norm_price, norm_price], [0, 1], color="black", linewidth=3, marker="o", markersize=10)
+
+     # Labels
+     ax.text(np.pi + 0.1, 1.2, f"Min: {int(min_price)}", ha="center", fontsize=10, color="black", fontweight="bold")
+     ax.text(np.pi / 2, 1.2, f"Avg: {int(avg_price)}", ha="center", fontsize=10, color="black", fontweight="bold")
+     ax.text(-0.1, 1.2, f"Max: {int(max_price)}", ha="center", fontsize=10, color="black", fontweight="bold")
+
+     # Final styling
+     plt.title("Your Price Compared to Market", fontsize=12, fontweight="bold", color="black")
+     ax.set_xticks([])
+     ax.set_yticks([])
+     ax.set_frame_on(False)
+     plt.show()
+     # Save the plot to an in-memory buffer
+     img_buffer = io.BytesIO()
+     plt.savefig(img_buffer, format="png", bbox_inches="tight")  # Save the figure to the buffer
+     plt.close(fig)
+     img_buffer.seek(0)
+
+     return img_buffer
+
+ def recommend_price(min_price, avg_price, max_price, user_price, user_cost, price_list):
+     # Calculate quartiles
+     q1 = float(np.percentile(price_list, 25))
+     q3 = float(np.percentile(price_list, 75))
+
+     # Price Adjustment Suggestion based on market range
+     if user_price < min_price:
+         price_suggestion = f"Your price is too low. Consider increasing it to at least {min_price}."
+     elif user_price > max_price:
+         price_suggestion = f"Your price is too high. Consider lowering it below {max_price}."
+     else:
+         price_suggestion = "Your price is competitive in the market."
+
+     # Market Competitiveness Rating
+     if user_price < q1:
+         competitiveness = "Very Cheap (Consider increasing your price!)"
+     elif q1 <= user_price <= q3:
+         competitiveness = "Competitive (Good price in the market)"
+     else:
+         competitiveness = "Expensive (Consider lowering your price)"
+
+     # Recommended Selling Price Range (ensuring at least 10% profit)
+     recommended_price = max(user_cost * 1.1, q1)  # Ensure minimum profit of 10%
+     recommended_range = (round(recommended_price, 2), round(q3, 2))
+
+     # Relationship between User Price and Average Price
+     if user_price < avg_price:
+         avg_relation = f"Your price is below the average market price ({avg_price}). You may have room to increase it."
+     elif user_price > avg_price:
+         avg_relation = f"Your price is above the average market price ({avg_price}). Ensure your product quality justifies the price."
+     else:
+         avg_relation = "Your price matches the average market price."
+
+     # Profit Calculation
+     profit_margin = user_price - user_cost
+     profit_percentage = (profit_margin / user_cost) * 100 if user_cost > 0 else 0
+
+     return {
+         "min_price": min_price,
+         "max_price": max_price,
+         "avg_price": avg_price,
+         "user_price": user_price,
+         "price_suggestion": price_suggestion,
+         "competitiveness": competitiveness,
+         "recommended_range": recommended_range,
+         "avg_relation": avg_relation,
+         "profit_margin": f"{round(profit_margin, 2)} EGP",
+         "profit_percentage": f"{round(profit_percentage, 2)}%"
+     }
+
+
+ def get_prices_analysis(prices, cost_price, user_price):
+     prices = [float(p) for p in prices]
+     min_price, avg_price, max_price = get_MinMaxAverage(prices)
+     min_price, avg_price, max_price = int(min_price), float(avg_price), int(max_price)
+     # image_buffer = plot_your_price(user_price, min_price, max_price, avg_price)
+     recommendations = recommend_price(min_price, avg_price, max_price, user_price, cost_price, prices)
+     recommendations["recommended_range"] = tuple(map(float, recommendations["recommended_range"]))
+     return recommendations
+
+
+ def market_price_estimation(product_name, cost_price, user_price):
+     products, prices = get_products_list(product_name, cost_price)
+     recommendations = get_prices_analysis(prices, cost_price, user_price)
+     response = {
+         "products": products,
+         "recommendations": recommendations,
+     }
+     return response
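A small worked example of the outlier handling above, with made-up prices: remove_outliers keeps values within multiplier × IQR of the quartiles, while get_MinMaxAverage takes the min and max from the unfiltered list and the average from the filtered one (so a stray 5000 still widens the reported max).

from price_analysis import remove_outliers, get_MinMaxAverage

prices = [1100, 1200, 1250, 1300, 5000]  # hypothetical scraped prices
print(remove_outliers(prices))           # [1100, 1200, 1250, 1300]; 5000 is beyond Q3 + IQR
mini, avg, maxi = get_MinMaxAverage(prices)
print(mini, avg, maxi)                   # 1100 1212.5 5000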
requirements.txt ADDED
File without changes
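The contents of requirements.txt are not shown in this diff. Judging only from the imports in the files above, it would need to cover at least fastapi, uvicorn, pydantic, requests, beautifulsoup4, numpy, and matplotlib (plus Pillow for the commented-out image preview in main.py); treat that list as an inference from the code, not the actual file contents.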