Vaibhav84 committed
Commit 41d24fb · Parent: 925e026
Files changed (1): app.py (+56, -116)
app.py CHANGED
@@ -2,137 +2,52 @@
from fastapi import FastAPI,File, HTTPException
from pydantic import BaseModel
import json
-
-
from typing import List, Dict, Any
import pandas as pd
import numpy as np
from sklearn.metrics.pairwise import cosine_similarity
from scipy import sparse
from datetime import datetime
-
-#SkillExtraction Packages
-import psycopg2
-import pandas as pd
-import numpy as np
-from sentence_transformers import SentenceTransformer
-import spacy
-from sklearn.metrics.pairwise import cosine_similarity
-from spacy.matcher import PhraseMatcher
-from skillNer.general_params import SKILL_DB
-from skillNer.skill_extractor_class import SkillExtractor
-from psycopg2.extensions import register_adapter, AsIs
-register_adapter(np.int64, AsIs)
import warnings
-warnings.filterwarnings('ignore')
+import os
+import logging

+warnings.filterwarnings('ignore')

-#Custom Classes for endpoints
-from DbConnection import DbConnection
-from UploadFile import UploadOpenFile
-from SkillExtract import SkillExtractorDetails
-from ExtractContentsFromFile import ExtractContentFromFile
-from RemoveSkills import RemoveSkill
-from AddSkillDetails import AddSkill
-from SkillMatcher import SkillMatch
-from SkillExtractV1 import SkillExtractorDetailsV1
-import ClassModals
-import os
-os.environ['HF_HOME'] = '/hug/cache/'
+# Set up logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)

app = FastAPI()
-
-nlp = spacy.load("en_core_web_lg")
-# init skill extractor
-skill_extractor = SkillExtractor(nlp, SKILL_DB, PhraseMatcher)
-model = SentenceTransformer('all-MiniLM-L6-v2')

# Get the current directory path
current_dir = os.path.dirname(os.path.abspath(__file__))
excel_path = os.path.join(current_dir, 'datasetsample.xlsx')

-purchase_history = pd.read_excel(excel_path, sheet_name='Transaction History',
-                                 parse_dates=['Purchase_Date'])
-purchase_history['Customer_Id'] = purchase_history['Customer_Id'].astype(str)
-product_categories = purchase_history[['Product_Id', 'Category']].drop_duplicates().set_index('Product_Id')['Category'].to_dict()
-purchase_counts = purchase_history.groupby(['Customer_Id', 'Product_Id']).size().unstack(fill_value=0)
-sparse_purchase_counts = sparse.csr_matrix(purchase_counts)
-cosine_similarities = cosine_similarity(sparse_purchase_counts.T)
-
-
-@app.get("/")
-async def root():
-    return {"Recommendation":"Recommendation Version 1.00, https://vaibhav84-recommendation.hf.space/redoc , https://vaibhav84-recommendation.hf.space/docs"}
-
-def parse_csv(df):
-    res = df.to_json(orient="records")
-    parsed = json.loads(res)
-    return parsed
-
-@app.post("/CustomerLogin/")
-def UploadJobDescription(CustomerID : str, CustomerPwd: str):
-    try:
-        if CustomerID != "" and CustomerPwd == (CustomerID + "123"):
-            return "Login Successful"
-        else:
-            return "Login Failed"
-    except Exception as e:
-        return "An error occurred: {e}"
-
-@app.get("/recommendations/{customer_id}")
-async def get_recommendations(customer_id: str, n: int = 5):
-    """
-    Get recommendations for a customer
-
-    Parameters:
-    - customer_id: The ID of the customer
-    - n: Number of recommendations to return (default: 5)
-
-    Returns:
-    - JSON object containing purchase history and recommendations
-    """
-    try:
-        purchased_items, recommended_items = get_customer_items_and_recommendations(customer_id, n)
-
-        return {
-            "customer_id": customer_id,
-            "purchase_history": purchased_items,
-            "recommendations": recommended_items
-        }
-    except Exception as e:
-        raise HTTPException(status_code=404, detail=f"Error processing customer ID: {customer_id}. {str(e)}")
-
-
-
-@app.post("/UploadJobDescription/")
-async def UploadJobDescription(file: bytes = File(...), FileName: str = "sample.pdf"):
-    try:
-        text= ExtractContentFromFile.ExtractDataFromFile(FileName,file)
-        returnSkills = SkillExtractorDetailsV1.GetSkillData(skill_extractor,text)
-        return parse_csv(returnSkills)
-    except Exception as e:
-        return "An error occurred: {e}"
-
-
-@app.delete("/RemoveSkillsByName/")
-def RemoveSkills(SkillName : str):
-    RemoveSkill.RemoveSkillDetails(SkillName)
-    return "Skill Removed Successfully"
-
-@app.post("/AddSkillDetails/")
-def AddSkills(Skills : ClassModals.Modals.AddSkillDetails):
-    skilldetailsStr = Skills.SkillName + ',' + Skills.SkillType + ',' + str(Skills.SkillScore)
-    return AddSkill.AddSkillDetails(skilldetailsStr)
-
-@app.put("/UpdateSkillDetails/")
-def UpdateSkills(Skills : ClassModals.Modals.UpdateSkillDetails):
-    skilldetailsStr = Skills.SkillName + ',' + str(Skills.SkillWeightage)
-    return AddSkill.UpdateSkillDetails(skilldetailsStr)
-
-@app.get("/GetAllSkillDetails/")
-def AllSkills():
-    return (AddSkill.GetSkillDetails())
-
+# Log the file path and directory contents for debugging
+logger.info(f"Current directory: {current_dir}")
+logger.info(f"Excel path: {excel_path}")
+logger.info("Directory contents:")
+for file in os.listdir(current_dir):
+    logger.info(f"- {file}")
+
+try:
+    # Load the data when the application starts
+    purchase_history = pd.read_excel(excel_path, sheet_name='Transaction History',
+                                     parse_dates=['Purchase_Date'])
+    logger.info("Successfully loaded Excel file")
+
+    purchase_history['Customer_Id'] = purchase_history['Customer_Id'].astype(str)
+    product_categories = purchase_history[['Product_Id', 'Category']].drop_duplicates().set_index('Product_Id')['Category'].to_dict()
+    purchase_counts = purchase_history.groupby(['Customer_Id', 'Product_Id']).size().unstack(fill_value=0)
+    sparse_purchase_counts = sparse.csr_matrix(purchase_counts)
+    cosine_similarities = cosine_similarity(sparse_purchase_counts.T)
+
+    logger.info("Data processing completed successfully")
+
+except Exception as e:
+    logger.error(f"Error loading or processing data: {str(e)}")
+    raise

def get_customer_items_and_recommendations(user_id: str, n: int = 5) -> tuple[List[Dict], List[Dict]]:
    """
@@ -178,6 +93,31 @@ def get_customer_items_and_recommendations(user_id: str, n: int = 5) -> tuple[List[Dict], List[Dict]]:
    ]

    return purchased_items_info, recommended_items_info
-#return JSONResponse(content={"message": "Here's your interdimensional portal." , "mes1":"data2"})
-#https://vaibhav84-resumeapi.hf.space/docs
-#https://vaibhav84-resumeapi.hf.space/redoc d
+
+@app.get("/")
+async def root():
+    return {"message": "Welcome to the Recommendation API"}
+
+@app.get("/recommendations/{customer_id}")
+async def get_recommendations(customer_id: str, n: int = 5):
+    """
+    Get recommendations for a customer
+
+    Parameters:
+    - customer_id: The ID of the customer
+    - n: Number of recommendations to return (default: 5)
+
+    Returns:
+    - JSON object containing purchase history and recommendations
+    """
+    try:
+        purchased_items, recommended_items = get_customer_items_and_recommendations(customer_id, n)
+
+        return {
+            "customer_id": customer_id,
+            "purchase_history": purchased_items,
+            "recommendations": recommended_items
+        }
+    except Exception as e:
+        logger.error(f"Error processing request for customer {customer_id}: {str(e)}")
+        raise HTTPException(status_code=404, detail=f"Error processing customer ID: {customer_id}. {str(e)}")
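
Note: the hunks above show only the signature of get_customer_items_and_recommendations; its body is unchanged by this commit and not visible here. As a reference for readers, the following is a minimal, self-contained sketch of how an item-item cosine-similarity recommender can be assembled from the same structures app.py builds at startup (purchase_counts, sparse_purchase_counts, cosine_similarities, product_categories). The sample DataFrame and the recommend() helper are invented for illustration and are not the code in this repository.

import pandas as pd
import numpy as np
from scipy import sparse
from sklearn.metrics.pairwise import cosine_similarity

# Made-up transactions standing in for the 'Transaction History' sheet.
purchase_history = pd.DataFrame({
    "Customer_Id": ["C1", "C1", "C2", "C2", "C3"],
    "Product_Id":  ["P1", "P2", "P1", "P3", "P2"],
    "Category":    ["Books", "Games", "Books", "Toys", "Games"],
})

# Same aggregation as in the commit: customers x products purchase counts,
# an item-item cosine-similarity matrix, and a product -> category lookup.
purchase_counts = purchase_history.groupby(["Customer_Id", "Product_Id"]).size().unstack(fill_value=0)
sparse_purchase_counts = sparse.csr_matrix(purchase_counts)
cosine_similarities = cosine_similarity(sparse_purchase_counts.T)
product_categories = purchase_history[["Product_Id", "Category"]].drop_duplicates().set_index("Product_Id")["Category"].to_dict()

def recommend(user_id: str, n: int = 5) -> list[dict]:
    """Hypothetical helper: score unseen products by similarity to the user's purchases."""
    user_row = purchase_counts.index.get_loc(user_id)
    purchased = sparse_purchase_counts[user_row].toarray().ravel() > 0
    # Sum each product's similarity to everything this customer already bought.
    scores = cosine_similarities[:, purchased].sum(axis=1)
    scores[purchased] = 0  # don't recommend items already purchased
    top = np.argsort(scores)[::-1][:n]
    items = purchase_counts.columns
    return [{"product_id": items[i], "category": product_categories.get(items[i]), "score": float(scores[i])}
            for i in top if scores[i] > 0]

print(recommend("C1", n=2))  # -> [{'product_id': 'P3', 'category': 'Toys', 'score': 0.707...}]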
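A hypothetical client call against the new /recommendations/{customer_id} endpoint might look like the snippet below. The base URL is assumed from the Space URL that appears in the removed root() response, and the customer id is made up; both would need to match a real deployment.

import requests

BASE_URL = "https://vaibhav84-recommendation.hf.space"  # assumed deployment URL, adjust as needed

# "C1001" is a made-up customer id; n controls how many recommendations are returned.
resp = requests.get(f"{BASE_URL}/recommendations/C1001", params={"n": 5}, timeout=30)
resp.raise_for_status()
payload = resp.json()
print(payload["customer_id"])
print(payload["purchase_history"])
print(payload["recommendations"])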