File size: 3,230 Bytes
bb7c702 518bab7 bb7c702 2c07988 5a4a17c 2c07988 411ecf4 bb7c702 ce0876e bb7c702 cc94f1d bb7c702 2c07988 95d2ae3 518bab7 95d2ae3 518bab7 cc94f1d 95d2ae3 518bab7 95d2ae3 518bab7 bb7c702 2c07988 bb7c702 518bab7 bb7c702 518bab7 bb7c702 518bab7 bb7c702 518bab7 2c07988 bb7c702 95d2ae3 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 |
import openai
import os
from paperqa import Docs
import gradio as gr
from langchain.document_loaders import PyPDFLoader
from langchain.vectorstores import Chroma
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.document_loaders import UnstructuredPDFLoader
from langchain.llms import OpenAI
from langchain.chains.question_answering import load_qa_chain
from langchain.chat_models import ChatOpenAI
# OpenAI API key configuration.
# SECURITY: never commit a real secret in source. Prefer supplying the key via
# the OPENAI_API_KEY environment variable; the placeholder below is only a
# fallback so the app still starts for local experimentation.
OPENAI_API_KEY = os.environ.get('OPENAI_API_KEY', 'sk-proj-')
os.environ['OPENAI_API_KEY'] = OPENAI_API_KEY
# Custom CSS injected into the Gradio app: sets a monospace font for the whole
# container and enlarges/colors paragraphs inside the answer Markdown component
# (targeted via elem_classes='answerText' below).
css_style = """
.gradio-container {
font-family: "IBM Plex Mono";
}
.answerText p {
font-size: 24px !important;
color: #8dbcfe !important;
}
"""
def run(uploaded_files):
    """Filter the uploaded file list down to PDF documents.

    Wired to the gr.File component's ``change`` event: the returned list of
    paths becomes the component's new value, discarding non-PDF uploads.

    Parameters
    ----------
    uploaded_files : list | None
        File objects from gr.File (each exposes a ``.name`` path), or None
        when the component is cleared.

    Returns
    -------
    list
        Paths of the uploaded files whose extension is .pdf.
    """
    if uploaded_files is None:
        # Component cleared / nothing uploaded yet.
        return []
    pdf_paths = []
    for file in uploaded_files:
        # Case-insensitive match so '.PDF' / '.Pdf' uploads are not
        # silently dropped (the original check was case-sensitive).
        if file.name.lower().endswith('.pdf'):
            pdf_paths.append(file.name)
            # NOTE(review): assumes the upload object exposes a `.size`
            # attribute — confirm against the installed Gradio version.
            print(f"File uploaded: {file.name}, Size: {file.size} bytes")
    return pdf_paths
def createAnswer(files, designation):
    """Index the uploaded resumes and ask the LLM for the best candidate.

    Parameters
    ----------
    files : list
        Uploaded file objects (each exposing a ``.name`` path).
    designation : str
        The job title/role the candidates are evaluated against.

    Returns
    -------
    str
        The model's answer text, or a human-readable error message when a
        file cannot be indexed.
    """
    # Fresh paperqa document index backed by gpt-3.5-turbo.
    document_index = Docs(llm='gpt-3.5-turbo')
    # Ingest every uploaded resume; abort with a user-facing message on the
    # first file that fails to parse.
    for uploaded in files:
        try:
            document_index.add(uploaded.name)
        except Exception as exc:
            print(f"Error adding file {uploaded.name}: {exc}")
            return f"Error reading file {uploaded.name}. Please ensure the file is not empty or corrupted."
        print(f"Successfully added file: {uploaded.name}")
    # Query across all indexed resumes for the best fit for the role.
    response = document_index.query(
        f"Who is the best candidate to hire for {designation}. Provide a list with the candidate name. If you don't know, simply say None of the candidates are suited for the Job role."
    )
    # Debug output: full formatted answer plus the response object's type.
    print(response.formatted_answer)
    print(type(response))
    return response.answer
# --- Gradio UI definition and launch ---------------------------------------
with gr.Blocks(css=css_style) as demo:
    # Static header / usage instructions (markdown banner).
    gr.Markdown(f"""
# HR-GPT - Filter & Find The Best Candidate for the Job using AI
*By Amin Memon ([@AminMemon](https://twitter.com/AminMemon))*
This tool will enable asking questions of your uploaded text, PDF documents,.
It uses OpenAI's ChatGPT model & OpenAI Embeddings and thus you must enter your API key below.
This tool is under active development and currently uses many tokens - up to 10,000
for a single query. That is $0.10-0.20 per query, so please be careful!
Porting it to Llama.cpp soon for saved cost.
1. Upload your Resumes (Try a few resumes/cv to try < 5)
2. Provide Designation for which you are hiring
""")
    # Free-text input: the job role candidates are ranked against.
    position = gr.Text(
        label='Position/Designation for which you are hiring for', value="")
    with gr.Tab('File Upload'):
        # Multi-file upload; on change, `run` filters the value to PDFs only.
        uploaded_files = gr.File(
            label="Resume Upload - ONLY PDF. (Doc File Support Coming Soon)", file_count="multiple", show_progress=True)
        uploaded_files.change(
            fn=run, inputs=[uploaded_files], outputs=[uploaded_files])
    ask = gr.Button("Find Top Candidate")
    # Answer area; 'answerText' class is styled by css_style above.
    answer = gr.Markdown(label="Result", elem_classes='answerText')
    # Button click runs the paperqa query over the uploaded resumes.
    ask.click(fn=createAnswer, inputs=[
        uploaded_files, position], outputs=[answer])
# NOTE(review): `concurrency_count` was removed from queue() in Gradio 4.x —
# confirm the pinned Gradio version before upgrading.
demo.queue(concurrency_count=20)
demo.launch(show_error=True)
|