# NOTE(review): removed Hugging Face Spaces status-banner residue
# ("Spaces / Sleeping / Sleeping") that was captured along with the source.
# Set the Hugging Face cache location BEFORE any HF-backed imports:
# huggingface_hub reads HF_HOME when it is imported, so assigning it after
# `sentence_transformers` has been imported (as the original did) had no effect.
import os
os.environ['HF_HOME'] = '/hug/cache/'

# Standard library
import json
import warnings
from datetime import datetime
from typing import List, Dict, Any

# FastAPI packages
from fastapi import FastAPI, File, HTTPException
from pydantic import BaseModel

# Data / ML packages (duplicated pandas/numpy/cosine_similarity imports removed)
import numpy as np
import pandas as pd
from scipy import sparse
from sklearn.metrics.pairwise import cosine_similarity

# Skill-extraction packages
import psycopg2
from psycopg2.extensions import register_adapter, AsIs
import spacy
from spacy.matcher import PhraseMatcher
from sentence_transformers import SentenceTransformer
from skillNer.general_params import SKILL_DB
from skillNer.skill_extractor_class import SkillExtractor

# Custom classes for endpoints
from DbConnection import DbConnection
from UploadFile import UploadOpenFile
from SkillExtract import SkillExtractorDetails
from ExtractContentsFromFile import ExtractContentFromFile
from RemoveSkills import RemoveSkill
from AddSkillDetails import AddSkill
from SkillMatcher import SkillMatch
from SkillExtractV1 import SkillExtractorDetailsV1
import ClassModals

# Let psycopg2 send numpy int64 values to PostgreSQL verbatim
# (AsIs(value).getquoted() renders the number unquoted).
register_adapter(np.int64, AsIs)
warnings.filterwarnings('ignore')
app = FastAPI()

# spaCy pipeline + phrase matcher back skillNer's extractor; the sentence
# transformer is used for embedding-based similarity elsewhere in the service.
nlp = spacy.load("en_core_web_lg")
# init skill extractor
skill_extractor = SkillExtractor(nlp, SKILL_DB, PhraseMatcher)
model = SentenceTransformer('all-MiniLM-L6-v2')

# Transaction history that feeds the recommender. Customer_Id is normalized
# to str so lookups by path/query parameters (always strings) match the index.
purchase_history = pd.read_excel('datasetsample.xlsx', sheet_name='Transaction History',
                                 parse_dates=['Purchase_Date'])
purchase_history['Customer_Id'] = purchase_history['Customer_Id'].astype(str)

# Product_Id -> Category lookup for annotating results.
product_categories = purchase_history[['Product_Id', 'Category']].drop_duplicates().set_index('Product_Id')['Category'].to_dict()

# Customer x Product purchase-count matrix; item-item cosine similarities are
# computed on its transpose (rows become products).
purchase_counts = purchase_history.groupby(['Customer_Id', 'Product_Id']).size().unstack(fill_value=0)
sparse_purchase_counts = sparse.csr_matrix(purchase_counts)
cosine_similarities = cosine_similarity(sparse_purchase_counts.T)
async def root():
    """Service banner endpoint: returns the API version and doc URLs.

    NOTE(review): a route decorator (e.g. @app.get("/")) appears to have been
    lost from the captured source — confirm against the deployed app.
    """
    return {"Recommendation":"Recommendation Version 1.00, https://vaibhav84-recommendation.hf.space/redoc , https://vaibhav84-recommendation.hf.space/docs"}
def parse_csv(df):
    """Convert a pandas DataFrame into a list of per-row dicts (JSON records).

    Round-trips through DataFrame.to_json(orient="records") so values are
    JSON-native types (str/int/float/bool/None).
    """
    return json.loads(df.to_json(orient="records"))
def UploadJobDescription(CustomerID : str, CustomerPwd: str):
    """Validate a demo login: the expected password is CustomerID + "123".

    Returns "Login Successful" or "Login Failed".

    NOTE(review): an `async def UploadJobDescription` defined later in this
    file rebinds this name — confirm the intended route decorators/names.
    """
    try:
        if CustomerID != "" and CustomerPwd == (CustomerID + "123"):
            return "Login Successful"
        else:
            return "Login Failed"
    except Exception as e:
        # BUG FIX: the original string lacked the f-prefix, so callers
        # received the literal text "An error occurred: {e}".
        return f"An error occurred: {e}"
async def get_recommendations(customer_id: str, n: int = 5):
    """
    Get recommendations for a customer.

    Parameters:
    - customer_id: The ID of the customer
    - n: Number of recommendations to return (default: 5)

    Returns:
    - JSON object containing purchase history and recommendations

    Raises:
    - HTTPException(404) if the lookup fails for any reason; the original
      error text is preserved in the detail message.
    """
    try:
        purchased_items, recommended_items = get_customer_items_and_recommendations(customer_id, n)
        return {
            "customer_id": customer_id,
            "purchase_history": purchased_items,
            "recommendations": recommended_items
        }
    except Exception as e:
        raise HTTPException(status_code=404, detail=f"Error processing customer ID: {customer_id}. {str(e)}")
async def UploadJobDescription(file: bytes = File(...), FileName: str = "sample.pdf"):
    """Extract skills from an uploaded file and return them as JSON records.

    NOTE(review): this rebinds the login helper of the same name defined
    earlier in the file — confirm the intended endpoint names/decorators.
    """
    try:
        text= ExtractContentFromFile.ExtractDataFromFile(FileName,file)
        returnSkills = SkillExtractorDetailsV1.GetSkillData(skill_extractor,text)
        return parse_csv(returnSkills)
    except Exception as e:
        # BUG FIX: the original string lacked the f-prefix, so callers
        # received the literal text "An error occurred: {e}".
        return f"An error occurred: {e}"
def RemoveSkills(SkillName : str):
    """Delete the named skill via RemoveSkill and return a status message."""
    RemoveSkill.RemoveSkillDetails(SkillName)
    return "Skill Removed Successfully"
def AddSkills(Skills : ClassModals.Modals.AddSkillDetails):
    """Add a skill record; AddSkill expects a comma-joined
    "name,type,score" string, built here from the request model."""
    skilldetailsStr = Skills.SkillName + ',' + Skills.SkillType + ',' + str(Skills.SkillScore)
    return AddSkill.AddSkillDetails(skilldetailsStr)
def UpdateSkills(Skills : ClassModals.Modals.UpdateSkillDetails):
    """Update a skill's weightage; AddSkill expects a comma-joined
    "name,weightage" string, built here from the request model."""
    skilldetailsStr = Skills.SkillName + ',' + str(Skills.SkillWeightage)
    return AddSkill.UpdateSkillDetails(skilldetailsStr)
def AllSkills():
    """Return every stored skill record from AddSkill."""
    return AddSkill.GetSkillDetails()
def get_customer_items_and_recommendations(user_id: str, n: int = 5) -> tuple[List[Dict], List[Dict]]:
    """
    Get both purchased items and recommendations for a user.

    Reads the module-level purchase_counts / purchase_history /
    product_categories / sparse_purchase_counts / cosine_similarities
    built at startup.

    Returns ([purchased_items_info], [recommended_items_info]); both lists
    are empty for an unknown user_id.
    """
    user_id = str(user_id)
    # Unknown customer: nothing purchased, nothing to recommend.
    if user_id not in purchase_counts.index:
        return [], []

    # --- Purchase history ------------------------------------------------
    purchased_items = list(purchase_counts.columns[purchase_counts.loc[user_id] > 0])
    purchased_items_info = []
    user_purchases = purchase_history[purchase_history['Customer_Id'] == user_id]
    for item in purchased_items:
        item_purchases = user_purchases[user_purchases['Product_Id'] == item]
        total_amount = float(item_purchases['Amount (In Dollars)'].sum())
        last_purchase = pd.to_datetime(item_purchases['Purchase_Date'].max())
        category = product_categories.get(item, 'Unknown')
        purchased_items_info.append({
            'product_id': item,
            'category': category,
            'total_amount': total_amount,
            'last_purchase': last_purchase.strftime('%Y-%m-%d')
        })

    # --- Recommendations -------------------------------------------------
    # Score every product by similarity to the user's purchase vector,
    # zero out already-purchased products, then take the top-n.
    user_idx = purchase_counts.index.get_loc(user_id)
    user_history = sparse_purchase_counts[user_idx].toarray().flatten()
    similarities = cosine_similarities.dot(user_history)
    purchased_indices = np.where(user_history > 0)[0]
    similarities[purchased_indices] = 0
    recommended_indices = np.argsort(similarities)[::-1][:n]
    recommended_items = list(purchase_counts.columns[recommended_indices])
    # Belt-and-braces: drop anything already purchased (scores were zeroed,
    # but ties at 0 could still surface a purchased product).
    recommended_items = [item for item in recommended_items if item not in purchased_items]

    recommended_items_info = [
        {
            'product_id': item,
            'category': product_categories.get(item, 'Unknown')
        }
        for item in recommended_items
    ]
    return purchased_items_info, recommended_items_info
#return JSONResponse(content={"message": "Here's your interdimensional portal." , "mes1":"data2"})
#https://vaibhav84-resumeapi.hf.space/docs
#https://vaibhav84-resumeapi.hf.space/redoc