Spaces:
Sleeping
Sleeping
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,107 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
import os
|
3 |
+
import fitz # PyMuPDF
|
4 |
+
from langchain.embeddings import HuggingFaceEmbeddings
|
5 |
+
from langchain.vectorstores import FAISS
|
6 |
+
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
7 |
+
from langchain_groq import ChatGroq
|
8 |
+
from langchain_core.prompts import ChatPromptTemplate
|
9 |
+
|
10 |
+
# Function to extract text from PDF
|
11 |
+
def extract_text_from_pdf(pdf_path):
    """Extract all plain text from a PDF file.

    Args:
        pdf_path: Path to the PDF file to read.

    Returns:
        The concatenated text of every page, in page order.
    """
    document = fitz.open(pdf_path)
    try:
        # Iterate pages directly instead of indexing via range(len(...)).
        text = "".join(page.get_text("text") for page in document)
    finally:
        # The original never closed the document, leaking the file handle.
        document.close()
    return text
|
18 |
+
|
19 |
+
# Function to split text into chunks
|
20 |
+
def split_text_into_chunks(text, chunk_size=1000, chunk_overlap=200):
    """Split raw text into overlapping chunks suitable for embedding.

    Args:
        text: The input text to split.
        chunk_size: Maximum number of characters per chunk.
        chunk_overlap: Characters of overlap between consecutive chunks.

    Returns:
        A list of text chunks.
    """
    splitter = RecursiveCharacterTextSplitter(
        chunk_size=chunk_size,
        chunk_overlap=chunk_overlap,
        length_function=len,
    )
    return splitter.split_text(text)
|
28 |
+
|
29 |
+
# Function to create vector store
|
30 |
+
def create_vector_store(chunks):
    """Build an in-memory FAISS index over the given text chunks.

    Args:
        chunks: List of text chunks to embed and index.

    Returns:
        A FAISS vector store backed by MiniLM sentence embeddings.
    """
    embedder = HuggingFaceEmbeddings(
        model_name="sentence-transformers/all-MiniLM-L6-v2"
    )
    return FAISS.from_texts(chunks, embedder)
|
34 |
+
|
35 |
+
# Function to perform RAG
|
36 |
+
def perform_rag(vector_store, query):
    """Retrieve the chunks most relevant to *query* and join them.

    Args:
        vector_store: Vector store built from the resume text.
        query: Search query (here, the target job role).

    Returns:
        The retrieved chunks concatenated into one context string.
    """
    matches = vector_store.as_retriever().get_relevant_documents(query)
    return " ".join(doc.page_content for doc in matches)
|
41 |
+
|
42 |
+
# Function to parse output
|
43 |
+
def parse_output(output):
    """Extract plain cover-letter text from a chat-model response.

    Handles the case where the response text accidentally contains a
    stringified message wrapper (``content="..."``); otherwise returns
    the content as-is with literal ``\\n`` sequences expanded.

    Args:
        output: A message object exposing a ``.content`` string
            (e.g. a LangChain ``AIMessage``).

    Returns:
        The cleaned cover-letter text.
    """
    text = output.content
    marker = 'content="'
    start = text.find(marker)
    # BUG FIX: the original did `find(...) + len(marker)` unconditionally,
    # so when the marker was absent (the normal case) find() returned -1,
    # start became 8 and rfind('"') returned -1 — silently stripping the
    # first 8 and last 1 characters of every normal reply.
    if start != -1:
        end = text.rfind('"')
        if end > start + len(marker):
            text = text[start + len(marker):end]
    # Expand escaped newlines that survive stringification.
    return text.replace('\\n', '\n')
|
51 |
+
|
52 |
+
# Create Gradio interface function with topic as input
|
53 |
+
def generate_cover_letter(pdf_path, job_role, company_name, company_context, groq_api_key):
    """Generate a tailored cover letter from a resume PDF via RAG + a Groq LLM.

    Args:
        pdf_path: Path to the uploaded resume PDF.
        job_role: Target job title to apply for.
        company_name: Name of the company being applied to.
        company_context: Brief free-text description of the company.
        groq_api_key: Groq API key supplied by the user.

    Returns:
        The generated cover letter as plain text.
    """
    # Keep the env var set for any downstream code that reads it, but pass
    # the key to ChatGroq directly instead of the original fragile
    # set-env-then-getenv round-trip.
    os.environ["GROQ_API_KEY"] = groq_api_key

    # Build a retrievable candidate profile from the resume.
    text = extract_text_from_pdf(pdf_path)
    chunks = split_text_into_chunks(text)
    vector_store = create_vector_store(chunks)
    candidate_profile = perform_rag(vector_store, job_role)

    # Load the Groq chat model.
    chat = ChatGroq(
        temperature=0.5,
        model="llama3-70b-8192",  # model available on Groq
        api_key=groq_api_key,
    )

    # Define the prompt. (Fixed typo in the original: "cover letetr".)
    system = "You are expert career coach and consultant. You will generate well made and proper cover letter based on user profile and user input."
    human = """
    So, I am applying for {job_role} at {company_name}
    =================
    {company_context}
    =================
    {candidate_profile}
    =================
    From the company profile and my profile, please create a cover letter for the {job_role} position. Ensure that it is well-crafted and engaging for recruiters and hiring managers. Also, verify that my recent work experience and academic background align with the role I am applying for.
    """
    prompt = ChatPromptTemplate.from_messages([("system", system), ("human", human)])

    # Run the chain and clean up the model output.
    chain = prompt | chat
    output = chain.invoke({
        "job_role": job_role,
        "company_name": company_name,
        "company_context": company_context,
        "candidate_profile": candidate_profile,
    })
    return parse_output(output)
|
93 |
+
|
94 |
+
# Create Gradio interface
|
95 |
+
# Build and launch the Gradio UI for the cover-letter generator.
demo = gr.Interface(
    fn=generate_cover_letter,
    inputs=[
        gr.File(label="Upload ATS Resume (PDF)", file_types=[".pdf"]),
        gr.Textbox(label="Job Role", placeholder="Ex: Data Scientist, Fullstack Developer, etc."),
        gr.Textbox(label="Company Name", placeholder="Enter a company name you applying"),
        gr.Textbox(label="Company Context", placeholder="Enter a brief description of the company"),
        gr.Textbox(label="Groq API Key", type="password", placeholder="Enter your Groq API Key"),
    ],
    outputs=gr.Textbox(label="Generated Cover Letter", show_copy_button=True),
    title="Cover Letter Generator",
    description="Generate a cover letter based on your job role, company, context, and profile.",
)
demo.launch()
|