# FastAPI packages
from fastapi import FastAPI, File
from pydantic import BaseModel
import json
#SkillExtraction Packages
import psycopg2
import pandas as pd
import numpy as np
import spacy
from sklearn.metrics.pairwise import cosine_similarity
from spacy.matcher import PhraseMatcher
from skillNer.general_params import SKILL_DB
from skillNer.skill_extractor_class import SkillExtractor
from psycopg2.extensions import register_adapter, AsIs
register_adapter(np.int64, AsIs)
import warnings
warnings.filterwarnings('ignore')
#Custom Classes for endpoints
from DbConnection import DbConnection
from UploadFile import UploadOpenFile
from SkillExtract import SkillExtractorDetails
from ExtractContentsFromFile import ExtractContentFromFile
from RemoveSkills import RemoveSkill
from AddSkillDetails import AddSkill
import ClassModals
import os
os.environ['HF_HOME'] = '/hug/cache/'
app = FastAPI()
nlp = spacy.load("en_core_web_lg")
# init skill extractor
skill_extractor = SkillExtractor(nlp, SKILL_DB, PhraseMatcher)
@app.get("/")
async def root():
return {"SkillAPI":"SkillAPi Version 0.05"}
db_params = DbConnection.GetDbConnection()
def parse_csv(df):
    # Serialize the DataFrame to a list of JSON records for the API response
    res = df.to_json(orient="records")
    parsed = json.loads(res)
    return parsed
@app.post("/UploadJobDescription/")
def UploadJobDescription(file: bytes = File(...), FileName: str = "sample.pdf"):
    # Extract text from the uploaded file, persist it, then extract skills from it
    text = ExtractContentFromFile.ExtractDataFromFile(FileName, file)
    returnID = UploadOpenFile.uploadFile(text, FileName, db_params, True)
    returnSkills = SkillExtractorDetails.SkillExtract(db_params, skill_extractor, returnID)
    details = returnSkills.split('@')
    data = {'Data': ['Required Skills', 'Soft Skills', 'Good to have Skills'],
            'Values': [details[0], details[1], details[2]]}
    df = pd.DataFrame(data)
    return parse_csv(df)
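# Hedged usage sketch (assumption, not part of the deployed app): /UploadJobDescription/
# expects the file bytes as a multipart form field named "file", with FileName passed
# as a query parameter. Example with the `requests` library:
#
# import requests
#
# with open("job_description.pdf", "rb") as f:
#     resp = requests.post(
#         "https://vaibhav84-resumeapi.hf.space/UploadJobDescription/",
#         files={"file": f},
#         params={"FileName": "job_description.pdf"},
#     )
# print(resp.json())  # records for Required Skills, Soft Skills, Good to have Skills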
@app.get("/AllProfileMatchResults")
def AllProfileMatchResults():
dbQuery = "select * from profilematch"
conn = psycopg2.connect(**db_params)
df = pd.read_sql_query(dbQuery, conn)
return parse_csv(df)
@app.post("/UploadOpenText/")
def UploadOpenText(file_data: ClassModals.Modals.FileDetails):
    returnID = UploadOpenFile.uploadFile(file_data.filecontents, file_data.filename, db_params, file_data.IsJobDescription)
    file_data.filecontents = ""
    file_data.fileid = str(returnID)
    file_data.message = "File Uploaded Successfully!"
    return file_data
@app.post("/ExtractSkillsByJobID/")
def ExtractSkillsByJobID(skill_data: ClassModals.Modals.SkillDetails):
    returnSkills = SkillExtractorDetails.SkillExtract(db_params, skill_extractor, skill_data.skillid)
    details = returnSkills.split('@')
    skill_data.requiredSkills = details[0]
    skill_data.softSkills = details[1]
    skill_data.goodToHaveSkills = details[2]
    return skill_data
@app.delete("/RemoveSkillsByName/")
def RemoveSkills(SkillName: str):
    RemoveSkill.RemoveSkillDetails(db_params, SkillName)
    return "Skill Removed Successfully"
@app.post("/AddSkillDeails/")
def AddSkills(Skills : ClassModals.Modals.AddSkillDetails):
skilldetailsStr = Skills.SkillName + ',' + Skills.SkillType + ',' + str(Skills.SkillScore)
return AddSkill.AddSkillDetails(db_params,skilldetailsStr)
#return JSONResponse(content={"message": "Here's your interdimensional portal." , "mes1":"data2"})
#https://vaibhav84-resumeapi.hf.space/docs
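# Hedged usage sketch (assumption): posting plain text through /UploadOpenText/.
# Field names follow ClassModals.Modals.FileDetails as used above; the exact set of
# required fields depends on that model's definition.
#
# import requests
#
# payload = {
#     "filename": "job.txt",
#     "filecontents": "Looking for a Python developer with FastAPI and PostgreSQL experience.",
#     "fileid": "",
#     "message": "",
#     "IsJobDescription": True,
# }
# resp = requests.post("https://vaibhav84-resumeapi.hf.space/UploadOpenText/", json=payload)
# print(resp.json())  # echoes the payload with fileid set and a success message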