Vaibhav84's picture
'changes'
4ead886
raw
history blame
6.61 kB
#Fast APi Packages
from fastapi import FastAPI,File
from pydantic import BaseModel
import json
#SkillExtraction Packages
import psycopg2
import pandas as pd
import numpy as np
from sentence_transformers import SentenceTransformer
import spacy
from sklearn.metrics.pairwise import cosine_similarity
from spacy.matcher import PhraseMatcher
from skillNer.general_params import SKILL_DB
from skillNer.skill_extractor_class import SkillExtractor
from psycopg2.extensions import register_adapter, AsIs
# Teach psycopg2 how to send numpy int64 values: AsIs renders them with str(),
# so np.int64 query parameters are inlined as plain integers.
register_adapter(np.int64, AsIs)
import warnings
# Silence library warnings globally (e.g. pandas' DBAPI warning in read_sql_query).
warnings.filterwarnings('ignore')
#Custom Classes for endpoints
from DbConnection import DbConnection
from UploadFile import UploadOpenFile
from SkillExtract import SkillExtractorDetails
from ExtractContentsFromFile import ExtractContentFromFile
from RemoveSkills import RemoveSkill
from AddSkillDetails import AddSkill
from SkillMatcher import SkillMatch
import ClassModals
import os
# Point the Hugging Face cache at a writable path BEFORE any model download
# (SentenceTransformer below pulls weights into this cache).
os.environ['HF_HOME'] = '/hug/cache/'
app = FastAPI()
# Large English spaCy pipeline; required by skillNer's PhraseMatcher-based extractor.
nlp = spacy.load("en_core_web_lg")
# init skill extractor
skill_extractor = SkillExtractor(nlp, SKILL_DB, PhraseMatcher)
# Sentence-embedding model used by SkillMatch for resume/job similarity scoring.
model = SentenceTransformer('all-MiniLM-L6-v2')
@app.get("/")
async def root():
    """Liveness/version endpoint for the Skill API."""
    version_payload = {"SkillAPI": "SkillAPi Version 0.05"}
    return version_payload
# Shared DB connection parameters, resolved once at import time and passed to
# psycopg2.connect / the project helper classes by every endpoint below.
db_params = DbConnection.GetDbConnection()
def parse_csv(df):
    """Convert a DataFrame into a JSON-safe list of row dicts (one dict per row)."""
    records_json = df.to_json(orient="records")
    return json.loads(records_json)
@app.post("/UploadJobDescription/")
def UploadJobDescription(file: bytes = File(...), FileName: str = "sample.pdf"):
    """Extract text from an uploaded job-description file, store it, and
    return the extracted skills as JSON records (required / soft / good-to-have)."""
    content = ExtractContentFromFile.ExtractDataFromFile(FileName, file)
    job_id = UploadOpenFile.uploadFile(content, FileName, db_params, True)
    skills = SkillExtractorDetails.SkillExtract(db_params, skill_extractor, job_id, True)
    # Extractor packs the three skill groups into one '@'-separated string.
    parts = skills.split('@')
    frame = pd.DataFrame({
        'Data': ['Required Skills', 'Soft Skills', 'Good to have Skills'],
        'Values': [parts[0], parts[1], parts[2]],
    })
    return parse_csv(frame)
@app.get("/AllProfileMatchResults")
def AllProfileMatchResults():
    """Return every row of the profilematch table as a list of JSON records.

    Fix: the original opened a psycopg2 connection and never closed it,
    leaking one connection per request; the connection is now released in
    a finally block even when the query raises.
    """
    dbQuery = "select * from profilematch"
    conn = psycopg2.connect(**db_params)
    try:
        df = pd.read_sql_query(dbQuery, conn)
    finally:
        conn.close()
    return parse_csv(df)
@app.get("/UploadJobDescriptionOpenText/")
def UploadOpenText(text : str, filename : str):
    """Store raw job-description text, extract its skills, then return the
    best-matching stored resume (name, score, profile id) as JSON records."""
    job_id = UploadOpenFile.uploadFile(text, filename, db_params, True)
    SkillExtractorDetails.SkillExtract(db_params, skill_extractor, job_id, True)
    # Matcher replies as a single ';'-separated string: name;score;profile-id.
    match_parts = SkillMatch.SkillMatcher(model, db_params, job_id).split(';')
    frame = pd.DataFrame({
        'Data': ['Best Resume Fit', 'Score', 'ProfileID'],
        'Values': [match_parts[0], match_parts[1], match_parts[2]],
    })
    return parse_csv(frame)
@app.post("/ExtractSkillsByJobID/")
def ExtractSkillsByJobID(skill_data: ClassModals.Modals.SkillDetails):
    """Populate skill_data with the skills extracted for its job id and return it.

    The extractor result is one '@'-separated string ordered:
    required @ soft @ good-to-have.
    """
    returnSkills = SkillExtractorDetails.SkillExtract(db_params, skill_extractor, skill_data.skillid)
    details = returnSkills.split('@')
    skill_data.requiredSkills = details[0]
    skill_data.softSkills = details[1]
    # Fix: good-to-have skills are the third segment; the original assigned
    # details[1] again, duplicating the soft skills.
    skill_data.goodToHaveSkills = details[2]
    return skill_data
@app.delete("/RemoveSkillsByName/")
def RemoveSkills(SkillName : str):
    """Delete the named skill from the database and confirm removal."""
    RemoveSkill.RemoveSkillDetails(db_params, SkillName)
    return "Skill Removed Successfully"
@app.post("/AddSkillDeails/")
def AddSkills(Skills : ClassModals.Modals.AddSkillDetails):
    """Persist a new skill (name, type, score) and return the helper's status.

    NOTE(review): route spelling "AddSkillDeails" is kept as-is — existing
    clients depend on the published path.
    """
    payload = ','.join([Skills.SkillName, Skills.SkillType, str(Skills.SkillScore)])
    return AddSkill.AddSkillDetails(db_params, payload)
@app.post("/UploadProfileFromFile/")
def UploadProfileFromFile(file: bytes = File(...), FileName: str = "sample.pdf"):
    """Extract text from an uploaded resume file, store it as a profile
    (flag False = profile, not job), and return its extracted skills."""
    content = ExtractContentFromFile.ExtractDataFromFile(FileName, file)
    profile_id = UploadOpenFile.uploadFile(content, FileName, db_params, False)
    skills = SkillExtractorDetails.SkillExtract(db_params, skill_extractor, profile_id, False)
    # '@'-separated: required @ soft @ good-to-have.
    parts = skills.split('@')
    frame = pd.DataFrame({
        'Data': ['Required Skills', 'Soft Skills', 'Good to have Skills'],
        'Values': [parts[0], parts[1], parts[2]],
    })
    return parse_csv(frame)
class FileText(BaseModel):
    """Request model pairing raw document text with its source filename."""
    # Fix: the text field was annotated `int`, which would reject every real
    # text payload under pydantic validation; free text is a string.
    text: str
    fname: str
@app.get("/UploadProfileOpenText/")
def UploadProfileOpenText(text : str, filename : str):
    """Store raw resume text as a profile (flag False = profile) and return
    its extracted skills as JSON records."""
    profile_id = UploadOpenFile.uploadFile(text, filename, db_params, False)
    skills = SkillExtractorDetails.SkillExtract(db_params, skill_extractor, profile_id, False)
    # '@'-separated: required @ soft @ good-to-have.
    parts = skills.split('@')
    frame = pd.DataFrame({
        'Data': ['Required Skills', 'Soft Skills', 'Good to have Skills'],
        'Values': [parts[0], parts[1], parts[2]],
    })
    return parse_csv(frame)
@app.get("/GetMatchProfileByJobId/")
def GetMatchProfileByJobId(JobId : int):
    """Return the best-matching stored resume for the given job id
    (name, score, profile id) as JSON records."""
    # Matcher replies as a single ';'-separated string: name;score;profile-id.
    match_parts = SkillMatch.SkillMatcher(model, db_params, JobId).split(';')
    frame = pd.DataFrame({
        'Data': ['Best Resume Fit', 'Score', 'ProfileID'],
        'Values': [match_parts[0], match_parts[1], match_parts[2]],
    })
    return parse_csv(frame)
from openai import OpenAI
@app.get("/GetOpenAPIResponse/")
def GetOpenAPIResponse():
    """Summarize a hard-coded sample resume paragraph via the OpenAI chat API.

    Fixes:
    - SECURITY: the original committed a live OpenAI API key in source; the
      key is now read from the OPENAI_API_KEY environment variable. The
      leaked key must be rotated regardless.
    - The original computed the summary, printed it, and returned None, so
      the endpoint always responded with null; the summary is now returned.
    """
    client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {
                "role": "system",
                "content": "Summarize content you are provided in a single paragraph."
            },
            {
                "role": "user",
                "content": "Working as a secondary application owner with the Development team, on troubleshooting, analyzing data stored, its inbound and outbound core functionality using SQL Scripts and commands. Synchronized various records in the static Data Source (SSSDR), and solving issues related to the running SQL Server Instance and infrastructure. • Worked as a Database Integration Specialist with experience in Autosys Job Automation, worked towards Jil file management and troubleshooting issues related to the data administration, created end to end application flow PARP and DIRP Cutover with all server specifications, database nodes and overseeing the entire infra level application flow.• Worked as a Support application developer, analyzing issues and performing"
                +"troubleshooting steps and handled exceptions through exception handling mechanism in Java."
            }
        ],
        temperature=0.7,
        max_tokens=64,
        top_p=1
    )
    summary = response.choices[0].message.content
    print(summary)  # keep the original console side effect
    return summary
#return JSONResponse(content={"message": "Here's your interdimensional portal." , "mes1":"data2"})
#https://vaibhav84-resumeapi.hf.space/docs