# FastAPI packages
from fastapi import FastAPI, File, HTTPException
from pydantic import BaseModel
import json


from typing import List, Dict, Any
import pandas as pd
import numpy as np
from sklearn.metrics.pairwise import cosine_similarity
from scipy import sparse
from datetime import datetime

# Skill extraction packages
import psycopg2
from sentence_transformers import SentenceTransformer
import spacy
from spacy.matcher import PhraseMatcher
from skillNer.general_params import SKILL_DB
from skillNer.skill_extractor_class import SkillExtractor
from psycopg2.extensions import register_adapter, AsIs
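# Let psycopg2 serialize numpy int64 values as plain integer literals.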
register_adapter(np.int64, AsIs)
import warnings
warnings.filterwarnings('ignore')


#Custom Classes for endpoints
from DbConnection import DbConnection
from UploadFile import UploadOpenFile
from SkillExtract import SkillExtractorDetails
from ExtractContentsFromFile import ExtractContentFromFile
from RemoveSkills import RemoveSkill
from AddSkillDetails import AddSkill
from SkillMatcher import SkillMatch
from SkillExtractV1 import SkillExtractorDetailsV1
import ClassModals
import os
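# Point the Hugging Face cache at a writable directory before any model is downloaded.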
os.environ['HF_HOME'] = '/hug/cache/'

app = FastAPI()

nlp = spacy.load("en_core_web_lg")
# init skill extractor
skill_extractor = SkillExtractor(nlp, SKILL_DB, PhraseMatcher)
model = SentenceTransformer('all-MiniLM-L6-v2')

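# Load the transaction history once at startup and precompute an item-item
# cosine-similarity matrix from the customer x product purchase-count matrix.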
purchase_history = pd.read_excel('datasetsample.xlsx', sheet_name='Transaction History', 
                               parse_dates=['Purchase_Date'])
purchase_history['Customer_Id'] = purchase_history['Customer_Id'].astype(str)
product_categories = purchase_history[['Product_Id', 'Category']].drop_duplicates().set_index('Product_Id')['Category'].to_dict()
purchase_counts = purchase_history.groupby(['Customer_Id', 'Product_Id']).size().unstack(fill_value=0)
sparse_purchase_counts = sparse.csr_matrix(purchase_counts)
cosine_similarities = cosine_similarity(sparse_purchase_counts.T)


@app.get("/")
async def root():
 return {"Recommendation":"Recommendation Version 1.00, https://vaibhav84-recommendation.hf.space/redoc , https://vaibhav84-recommendation.hf.space/docs"}
 
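# Convert a DataFrame into a list of JSON-serializable records for the response body.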
def parse_csv(df):
    res = df.to_json(orient="records")
    parsed = json.loads(res)
    return parsed

@app.post("/CustomerLogin/")
def UploadJobDescription(CustomerID : str, CustomerPwd: str):
    try:   
        if CustomerID != "" and CustomerPwd == (CustomerID + "123"):
            return "Login Successful"   
        else:
            return "Login Failed"        
    except Exception as e:
        return "An error occurred: {e}" 

@app.get("/recommendations/{customer_id}")
async def get_recommendations(customer_id: str, n: int = 5):
    """
    Get recommendations for a customer
    
    Parameters:
    - customer_id: The ID of the customer
    - n: Number of recommendations to return (default: 5)
    
    Returns:
    - JSON object containing purchase history and recommendations
    """
    try:
        purchased_items, recommended_items = get_customer_items_and_recommendations(customer_id, n)
        
        return {
            "customer_id": customer_id,
            "purchase_history": purchased_items,
            "recommendations": recommended_items
        }
    except Exception as e:
        raise HTTPException(status_code=404, detail=f"Error processing customer ID: {customer_id}. {str(e)}")



@app.post("/UploadJobDescription/")
async def UploadJobDescription(file: bytes =  File(...), FileName: str = "sample.pdf"):
    try:   
        text= ExtractContentFromFile.ExtractDataFromFile(FileName,file)
        returnSkills = SkillExtractorDetailsV1.GetSkillData(skill_extractor,text)     
        return parse_csv(returnSkills) 
    except Exception as e:
        return "An error occurred: {e}" 


@app.delete("/RemoveSkillsByName/")
def RemoveSkills(SkillName : str):    
    RemoveSkill.RemoveSkillDetails(SkillName)
    return "Skill Removed Successfully"

@app.post("/AddSkillDetails/")
def AddSkills(Skills : ClassModals.Modals.AddSkillDetails):    
    skilldetailsStr = Skills.SkillName + ',' + Skills.SkillType + ',' + str(Skills.SkillScore)    
    return AddSkill.AddSkillDetails(skilldetailsStr)

@app.put("/UpdateSkillDetails/")
def UpdateSkills(Skills : ClassModals.Modals.UpdateSkillDetails):    
    skilldetailsStr = Skills.SkillName + ',' + str(Skills.SkillWeightage)
    return AddSkill.UpdateSkillDetails(skilldetailsStr)

@app.get("/GetAllSkillDetails/")
def AllSkills():
    return (AddSkill.GetSkillDetails())


def get_customer_items_and_recommendations(user_id: str, n: int = 5) -> tuple[List[Dict], List[Dict]]:
    """
    Get both purchased items and recommendations for a user
    """
    user_id = str(user_id)
    
    if user_id not in purchase_counts.index:
        return [], []
    
    purchased_items = list(purchase_counts.columns[purchase_counts.loc[user_id] > 0])
    
    purchased_items_info = []
    user_purchases = purchase_history[purchase_history['Customer_Id'] == user_id]
    
    for item in purchased_items:
        item_purchases = user_purchases[user_purchases['Product_Id'] == item]
        total_amount = float(item_purchases['Amount (In Dollars)'].sum())
        last_purchase = pd.to_datetime(item_purchases['Purchase_Date'].max())
        category = product_categories.get(item, 'Unknown')
        purchased_items_info.append({
            'product_id': item,
            'category': category,
            'total_amount': total_amount,
            'last_purchase': last_purchase.strftime('%Y-%m-%d')
        })
    
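    # Score every item by summing item-item similarities over the user's purchase
    # vector, zero out items already bought, and keep the top-n remaining products.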
    user_idx = purchase_counts.index.get_loc(user_id)
    user_history = sparse_purchase_counts[user_idx].toarray().flatten()
    similarities = cosine_similarities.dot(user_history)
    purchased_indices = np.where(user_history > 0)[0]
    similarities[purchased_indices] = 0
    recommended_indices = np.argsort(similarities)[::-1][:n]
    recommended_items = list(purchase_counts.columns[recommended_indices])
    recommended_items = [item for item in recommended_items if item not in purchased_items]
    
    recommended_items_info = [
        {
            'product_id': item,
            'category': product_categories.get(item, 'Unknown')
        }
        for item in recommended_items
    ]

    return purchased_items_info, recommended_items_info
# https://vaibhav84-resumeapi.hf.space/docs
# https://vaibhav84-resumeapi.hf.space/redoc
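
# Local entry point (a minimal sketch, assuming uvicorn is installed; port 7860 is
# the conventional Hugging Face Spaces port, and Spaces normally starts the server itself).
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)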