Spaces: Upload app.py
Browse files

app.py CHANGED
@@ -60,7 +60,29 @@ class resume_analyzer:
        return chunks


-    def
+    def openai(openai_api_key, chunks, analyze):
+
+        # Using OpenAI service for embedding
+        embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)
+
+        # Facebook AI Similarity Search library helps us convert the text data to numerical vectors
+        vectorstores = FAISS.from_texts(chunks, embedding=embeddings)
+
+        # compares the query and chunks, enabling the selection of the top 'K' most similar chunks based on their similarity scores
+        docs = vectorstores.similarity_search(query=analyze, k=3)
+
+        # creates an OpenAI object, using the ChatGPT 3.5 Turbo model
+        llm = ChatOpenAI(model='gpt-3.5-turbo', api_key=openai_api_key)
+
+        # question-answering (QA) pipeline, making use of the load_qa_chain function
+        chain = load_qa_chain(llm=llm, chain_type='stuff')
+
+        response = chain.run(input_documents=docs, question=analyze)
+        return response
+
+
+    def summary_prompt(query_with_chunks):
+
        query = f''' need to detailed summarization of below resume and finally conclude them

        """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
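For orientation, the retrieval flow the new `openai()` method wires up can be exercised on its own. A minimal sketch, assuming the classic pre-0.1 LangChain import paths (these have since moved between packages, so treat them as an assumption):

    # Minimal sketch of the retrieval-QA flow used by resume_analyzer.openai()
    from langchain.embeddings.openai import OpenAIEmbeddings
    from langchain.vectorstores import FAISS
    from langchain.chat_models import ChatOpenAI
    from langchain.chains.question_answering import load_qa_chain

    def ask_resume(chunks, question, api_key):
        embeddings = OpenAIEmbeddings(openai_api_key=api_key)
        store = FAISS.from_texts(chunks, embedding=embeddings)   # embed and index the chunks
        docs = store.similarity_search(query=question, k=3)      # top-3 most similar chunks
        llm = ChatOpenAI(model='gpt-3.5-turbo', openai_api_key=api_key)
        chain = load_qa_chain(llm=llm, chain_type='stuff')       # 'stuff' packs all docs into one prompt
        return chain.run(input_documents=docs, question=question)

Note the prompt does double duty here: `analyze` is both the retrieval query and the question posed to the model.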
@@ -70,7 +92,51 @@ class resume_analyzer:
        return query


-    def
+    def resume_summary():
+
+        with st.form(key='Summary'):
+
+            # User Upload the Resume
+            add_vertical_space(1)
+            pdf = st.file_uploader(label='Upload Your Resume', type='pdf')
+            add_vertical_space(1)
+
+            # Enter OpenAI API Key
+            col1,col2 = st.columns([0.6,0.4])
+            with col1:
+                openai_api_key = st.text_input(label='Enter OpenAI API Key', type='password')
+            add_vertical_space(2)
+
+            # Click on Submit Button
+            submit = st.form_submit_button(label='Submit')
+            add_vertical_space(1)
+
+        add_vertical_space(3)
+        if submit:
+            if pdf is not None and openai_api_key != '':
+                try:
+                    with st.spinner('Processing...'):
+
+                        pdf_chunks = resume_analyzer.pdf_to_chunks(pdf)
+
+                        summary_prompt = resume_analyzer.summary_prompt(query_with_chunks=pdf_chunks)
+
+                        summary = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=summary_prompt)
+
+                    st.markdown(f'<h4 style="color: orange;">Summary:</h4>', unsafe_allow_html=True)
+                    st.write(summary)
+
+                except Exception as e:
+                    st.markdown(f'<h5 style="text-align: center;color: orange;">{e}</h5>', unsafe_allow_html=True)
+
+            elif pdf is None:
+                st.markdown(f'<h5 style="text-align: center;color: orange;">Please Upload Your Resume</h5>', unsafe_allow_html=True)
+
+            elif openai_api_key == '':
+                st.markdown(f'<h5 style="text-align: center;color: orange;">Please Enter OpenAI API Key</h5>', unsafe_allow_html=True)
+
+
+    def strength_prompt(query_with_chunks):
        query = f'''need to detailed analysis and explain of the strength of below resume and finally conclude them
        """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
        {query_with_chunks}
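Every analysis tab added in this commit repeats one `st.form` pattern, so widget edits don't rerun the script until Submit is pressed. The pattern, stripped to a skeleton:

    import streamlit as st

    with st.form(key='Summary'):
        pdf = st.file_uploader(label='Upload Your Resume', type='pdf')
        openai_api_key = st.text_input(label='Enter OpenAI API Key', type='password')
        submit = st.form_submit_button(label='Submit')  # the only widget that triggers a rerun

    if submit and pdf is not None and openai_api_key != '':
        st.write('ready to analyze')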
@@ -79,7 +145,55 @@ class resume_analyzer:
        return query


-    def
+    def resume_strength():
+
+        with st.form(key='Strength'):
+
+            # User Upload the Resume
+            add_vertical_space(1)
+            pdf = st.file_uploader(label='Upload Your Resume', type='pdf')
+            add_vertical_space(1)
+
+            # Enter OpenAI API Key
+            col1,col2 = st.columns([0.6,0.4])
+            with col1:
+                openai_api_key = st.text_input(label='Enter OpenAI API Key', type='password')
+            add_vertical_space(2)
+
+            # Click on Submit Button
+            submit = st.form_submit_button(label='Submit')
+            add_vertical_space(1)
+
+        add_vertical_space(3)
+        if submit:
+            if pdf is not None and openai_api_key != '':
+                try:
+                    with st.spinner('Processing...'):
+
+                        pdf_chunks = resume_analyzer.pdf_to_chunks(pdf)
+
+                        summary_prompt = resume_analyzer.summary_prompt(query_with_chunks=pdf_chunks)
+
+                        summary = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=summary_prompt)
+
+                        strength_prompt = resume_analyzer.strength_prompt(query_with_chunks=summary)
+
+                        strength = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=strength_prompt)
+
+                    st.markdown(f'<h4 style="color: orange;">Strength:</h4>', unsafe_allow_html=True)
+                    st.write(strength)
+
+                except Exception as e:
+                    st.markdown(f'<h5 style="text-align: center;color: orange;">{e}</h5>', unsafe_allow_html=True)
+
+            elif pdf is None:
+                st.markdown(f'<h5 style="text-align: center;color: orange;">Please Upload Your Resume</h5>', unsafe_allow_html=True)
+
+            elif openai_api_key == '':
+                st.markdown(f'<h5 style="text-align: center;color: orange;">Please Enter OpenAI API Key</h5>', unsafe_allow_html=True)
+
+
+    def weakness_prompt(query_with_chunks):
        query = f'''need to detailed analysis and explain of the weakness of below resume and how to improve make a better resume.

        """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
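The prompts are deliberately chained: the strength analysis runs over the model-written summary rather than the raw resume, while retrieval still searches the original chunks. A compressed restatement of that call order (all names come from this diff; this is not a new API):

    def analyze_strength(pdf, key):
        # Step 1: summarize the raw chunks
        pdf_chunks = resume_analyzer.pdf_to_chunks(pdf)
        summary_prompt = resume_analyzer.summary_prompt(query_with_chunks=pdf_chunks)
        summary = resume_analyzer.openai(openai_api_key=key, chunks=pdf_chunks, analyze=summary_prompt)
        # Step 2: analyze the summary, not the raw chunks, for strengths
        strength_prompt = resume_analyzer.strength_prompt(query_with_chunks=summary)
        return resume_analyzer.openai(openai_api_key=key, chunks=pdf_chunks, analyze=strength_prompt)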
@@ -89,7 +203,55 @@ class resume_analyzer:
        return query


-    def
+    def resume_weakness():
+
+        with st.form(key='Weakness'):
+
+            # User Upload the Resume
+            add_vertical_space(1)
+            pdf = st.file_uploader(label='Upload Your Resume', type='pdf')
+            add_vertical_space(1)
+
+            # Enter OpenAI API Key
+            col1,col2 = st.columns([0.6,0.4])
+            with col1:
+                openai_api_key = st.text_input(label='Enter OpenAI API Key', type='password')
+            add_vertical_space(2)
+
+            # Click on Submit Button
+            submit = st.form_submit_button(label='Submit')
+            add_vertical_space(1)
+
+        add_vertical_space(3)
+        if submit:
+            if pdf is not None and openai_api_key != '':
+                try:
+                    with st.spinner('Processing...'):
+
+                        pdf_chunks = resume_analyzer.pdf_to_chunks(pdf)
+
+                        summary_prompt = resume_analyzer.summary_prompt(query_with_chunks=pdf_chunks)
+
+                        summary = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=summary_prompt)
+
+                        weakness_prompt = resume_analyzer.weakness_prompt(query_with_chunks=summary)
+
+                        weakness = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=weakness_prompt)
+
+                    st.markdown(f'<h4 style="color: orange;">Weakness and Suggestions:</h4>', unsafe_allow_html=True)
+                    st.write(weakness)
+
+                except Exception as e:
+                    st.markdown(f'<h5 style="text-align: center;color: orange;">{e}</h5>', unsafe_allow_html=True)
+
+            elif pdf is None:
+                st.markdown(f'<h5 style="text-align: center;color: orange;">Please Upload Your Resume</h5>', unsafe_allow_html=True)
+
+            elif openai_api_key == '':
+                st.markdown(f'<h5 style="text-align: center;color: orange;">Please Enter OpenAI API Key</h5>', unsafe_allow_html=True)
+
+
+    def job_title_prompt(query_with_chunks):

        query = f''' what are the job roles i apply to likedin based on below?

@@ -100,25 +262,53 @@ class resume_analyzer:
        return query


-    def
-
-        embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)
-
-
-
-
-
-
-
-
-        response = chain.run(input_documents=docs, question=analyze)
-        return response
+    def job_title_suggestion():
+
+        with st.form(key='Job Titles'):
+
+            # User Upload the Resume
+            add_vertical_space(1)
+            pdf = st.file_uploader(label='Upload Your Resume', type='pdf')
+            add_vertical_space(1)
+
+            # Enter OpenAI API Key
+            col1,col2 = st.columns([0.6,0.4])
+            with col1:
+                openai_api_key = st.text_input(label='Enter OpenAI API Key', type='password')
+            add_vertical_space(2)
+
+            # Click on Submit Button
+            submit = st.form_submit_button(label='Submit')
+            add_vertical_space(1)
+
+        add_vertical_space(3)
+        if submit:
+            if pdf is not None and openai_api_key != '':
+                try:
+                    with st.spinner('Processing...'):
+
+                        pdf_chunks = resume_analyzer.pdf_to_chunks(pdf)
+
+                        summary_prompt = resume_analyzer.summary_prompt(query_with_chunks=pdf_chunks)
+
+                        summary = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=summary_prompt)
+
+                        job_title_prompt = resume_analyzer.job_title_prompt(query_with_chunks=summary)
+
+                        job_title = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=job_title_prompt)
+
+                    st.markdown(f'<h4 style="color: orange;">Job Titles:</h4>', unsafe_allow_html=True)
+                    st.write(job_title)
+
+                except Exception as e:
+                    st.markdown(f'<h5 style="text-align: center;color: orange;">{e}</h5>', unsafe_allow_html=True)
+
+            elif pdf is None:
+                st.markdown(f'<h5 style="text-align: center;color: orange;">Please Upload Your Resume</h5>', unsafe_allow_html=True)
+
+            elif openai_api_key == '':
+                st.markdown(f'<h5 style="text-align: center;color: orange;">Please Enter OpenAI API Key</h5>', unsafe_allow_html=True)


class linkedin_scraper:
@@ -141,11 +331,13 @@ class linkedin_scraper:
        with st.form(key='linkedin_scarp'):

            add_vertical_space(1)
-            col1,col2 = st.columns([0.
+            col1,col2,col3 = st.columns([0.5,0.3,0.2], gap='medium')
            with col1:
-
-
+                job_title_input = st.text_input(label='Job Title')
+                job_title_input = job_title_input.split()
            with col2:
+                job_location = st.text_input(label='Job Location', value='India')
+            with col3:
                job_count = st.number_input(label='Job Count', min_value=1, value=1, step=1)

            # Submit Button
@@ -153,10 +345,10 @@ class linkedin_scraper:
            submit = st.form_submit_button(label='Submit')
            add_vertical_space(1)

-        return
+        return job_title_input, job_location, job_count, submit


-    def build_url(job_title):
+    def build_url(job_title, job_location):

        b = []
        for i in job_title:
@@ -165,7 +357,7 @@ class linkedin_scraper:
            b.append(y)

        job_title = '%2C%20'.join(b)
-        link = f"https://in.linkedin.com/jobs/search?keywords={job_title}&location=
+        link = f"https://in.linkedin.com/jobs/search?keywords={job_title}&location={job_location}&locationId=&geoId=102713980&f_TPR=r604800&position=1&pageNum=0"

        return link

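`build_url` percent-encodes by hand: the title words are joined with `%2C%20`, the encoding of `', '`. A sketch of the same construction with the standard library doing the escaping; the trimmed query string is an assumption (the diff's URL also pins `geoId`, `locationId`, and page position):

    from urllib.parse import quote

    def build_url(job_title, job_location):
        keywords = quote(', '.join(job_title))  # ', ' encodes to '%2C%20'
        return (f"https://in.linkedin.com/jobs/search?keywords={keywords}"
                f"&location={quote(job_location)}&f_TPR=r604800&position=1&pageNum=0")

    print(build_url(['data', 'scientist'], 'India'))
    # https://in.linkedin.com/jobs/search?keywords=data%2C%20scientist&location=India&f_TPR=r604800&position=1&pageNum=0

`quote` would also escape spaces inside a multi-word title; the hand-rolled join relies on the input already being split into single words, which `get_userinput` does with `.split()`.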
@@ -203,10 +395,14 @@ class linkedin_scraper:
        matched_words = list(set(user_input).intersection(set(scrap_title)))

        # Return Job Title if there are more than 1 matched word else return NaN
-
+        if len(user_input) > 1:
+            return scrap_job_title if len(matched_words) > 1 else np.nan
+
+        else:
+            return scrap_job_title if len(matched_words) == 1 else np.nan


-    def scrap_company_data(driver, job_title_input):
+    def scrap_company_data(driver, job_title_input, job_location):

        # scraping the Company Data
        company = driver.find_elements(by=By.CSS_SELECTOR, value='h4[class="base-search-card__subtitle"]')
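The matching rule in `job_title_filter` is set intersection on words: a multi-word search keeps a scraped title only when at least two words overlap; a single-word search needs one. A standalone sketch (the lower-casing and word-splitting at the top is an assumption about the un-shown start of the method):

    import numpy as np

    def job_title_filter(scrap_job_title, user_job_title_input):
        # assumed normalization: lower-case and split both sides into words
        user_input = [w.lower() for t in user_job_title_input for w in t.split()]
        scrap_title = [w.lower() for w in scrap_job_title.split()]
        matched_words = list(set(user_input).intersection(set(scrap_title)))
        if len(user_input) > 1:
            return scrap_job_title if len(matched_words) > 1 else np.nan
        else:
            return scrap_job_title if len(matched_words) == 1 else np.nan

    print(job_title_filter('Senior Data Scientist', ['data', 'scientist']))  # kept
    print(job_title_filter('Sales Manager', ['data', 'scientist']))          # nan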
@@ -230,6 +426,9 @@ class linkedin_scraper:
        # Return Job Title if there are more than 1 matched word else return NaN
        df['Job Title'] = df['Job Title'].apply(lambda x: linkedin_scraper.job_title_filter(x, job_title_input))

+        # Return Location if User Job Location in Scraped Location else return NaN
+        df['Location'] = df['Location'].apply(lambda x: x if job_location.lower() in x.lower() else np.nan)
+
        # Drop Null Values and Reset Index
        df = df.dropna()
        df.reset_index(drop=True, inplace=True)
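The new location filter is a per-row case-insensitive substring test; misses become NaN and are dropped by the existing `dropna()` together with unmatched titles. For example:

    import numpy as np
    import pandas as pd

    df = pd.DataFrame({'Location': ['Chennai, Tamil Nadu, India', 'Dubai, United Arab Emirates']})
    job_location = 'India'
    df['Location'] = df['Location'].apply(lambda x: x if job_location.lower() in x.lower() else np.nan)
    print(df.dropna())   # only the Chennai row survives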
@@ -305,30 +504,32 @@ class linkedin_scraper:
    def main():

        # Initially set driver to None
-
+        driver = None

-
-        job_title_input, job_count, submit = linkedin_scraper.get_userinput()
+        try:
+            job_title_input, job_location, job_count, submit = linkedin_scraper.get_userinput()
            add_vertical_space(2)

            if submit:
-                if job_title_input != '':
+                if job_title_input != [] and job_location != '':

-                    with st.spinner('Webdriver Setup Initializing...'):
+                    with st.spinner('Chrome Webdriver Setup Initializing...'):
                        driver = linkedin_scraper.webdriver_setup()

-                    with st.spinner('
+                    with st.spinner('Loading More Job Listings...'):

                        # build URL based on User Job Title Input
-                        link = linkedin_scraper.build_url(job_title_input)
+                        link = linkedin_scraper.build_url(job_title_input, job_location)

                        # Open the Link in LinkedIn and Scroll Down the Page
                        linkedin_scraper.link_open_scrolldown(driver, link, job_count)

-                    with st.spinner('scraping
-
-
-
+                    with st.spinner('scraping Job Details...'):
+
+                        # Scraping the Company Name, Location, Job Title and URL Data
+                        df = linkedin_scraper.scrap_company_data(driver, job_title_input, job_location)
+
+                        # Scraping the Job Description Data
                        df_final = linkedin_scraper.scrap_job_description(driver, df, job_count)

                # Display the Data in User Interface
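`webdriver_setup()` itself sits outside this diff. For orientation, a typical headless Chrome setup for a hosted Streamlit app might look like the sketch below; every option here is an assumption, not the Space's actual configuration:

    from selenium import webdriver
    from selenium.webdriver.chrome.options import Options

    def webdriver_setup():
        options = Options()
        options.add_argument('--headless')            # no display on the server
        options.add_argument('--no-sandbox')          # common in containerized hosts
        options.add_argument('--disable-dev-shm-usage')
        return webdriver.Chrome(options=options)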
@@ -336,26 +537,30 @@ class linkedin_scraper:


            # If User Click Submit Button and Job Title is Empty
-            elif job_title_input ==
+                elif job_title_input == []:
                    st.markdown(f'<h5 style="text-align: center;color: orange;">Job Title is Empty</h5>',
                                unsafe_allow_html=True)
-
-
-
+
+                elif job_location == '':
+                    st.markdown(f'<h5 style="text-align: center;color: orange;">Job Location is Empty</h5>',
+                                unsafe_allow_html=True)

-
-
-
+        except Exception as e:
+            add_vertical_space(2)
+            st.markdown(f'<h5 style="text-align: center;color: orange;">{e}</h5>', unsafe_allow_html=True)

+        finally:
+            if driver:
+                driver.quit()



# Streamlit Configuration Setup
streamlit_config()
-add_vertical_space(
+add_vertical_space(5)
+

-# sidebar
with st.sidebar:

    add_vertical_space(3)
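The new `try/except/finally` means the Chrome process is reclaimed even when scraping raises. The shape of the guard, reduced to a skeleton (the `...` stands for the scraping steps):

    import streamlit as st

    driver = None
    try:
        driver = linkedin_scraper.webdriver_setup()   # may itself raise
        ...                                           # scraping steps
    except Exception as e:
        st.markdown(f'<h5 style="text-align: center;color: orange;">{e}</h5>', unsafe_allow_html=True)
    finally:
        if driver:
            driver.quit()   # always release the browser process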
@@ -364,113 +569,39 @@ with st.sidebar:
                         icons=['house-fill', 'database-fill', 'pass-fill', 'list-ul', 'linkedin', 'sign-turn-right-fill'])


-if option == 'Summary':
-
-    # file upload
-    pdf = st.file_uploader(label='', type='pdf')
-    openai_api_key = st.text_input(label='OpenAI API Key', type='password')
-
-    try:
-        if pdf is not None and openai_api_key is not None:
-            pdf_chunks = resume_analyzer.pdf_to_chunks(pdf)
-
-            summary = resume_analyzer.resume_summary(query_with_chunks=pdf_chunks)
-            result_summary = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=summary)
-
-            st.subheader('Summary:')
-            st.write(result_summary)
-
-    except Exception as e:
-        add_vertical_space(2)
-        st.markdown(f'<h5 style="text-align: center;color: orange;">{e}</h5>', unsafe_allow_html=True)
+if option == 'Summary':
+
+    resume_analyzer.resume_summary()


elif option == 'Strength':

-    pdf = st.file_uploader(label='', type='pdf')
-    openai_api_key = st.text_input(label='OpenAI API Key', type='password')
-
-    try:
-        if pdf is not None and openai_api_key is not None:
-
-            pdf_chunks = resume_analyzer.pdf_to_chunks(pdf)
-
-            # Resume summary
-            summary = resume_analyzer.resume_summary(query_with_chunks=pdf_chunks)
-            result_summary = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=summary)
-
-            strength = resume_analyzer.resume_strength(query_with_chunks=result_summary)
-            result_strength = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=strength)
-
-            st.subheader('Strength:')
-            st.write(result_strength)
-
-    except Exception as e:
-        add_vertical_space(2)
-        st.markdown(f'<h5 style="text-align: center;color: orange;">{e}</h5>', unsafe_allow_html=True)
+    resume_analyzer.resume_strength()


elif option == 'Weakness':

-    pdf = st.file_uploader(label='', type='pdf')
-    openai_api_key = st.text_input(label='OpenAI API Key', type='password')
-
-    try:
-        if pdf is not None and openai_api_key is not None:
-
-            pdf_chunks = resume_analyzer.pdf_to_chunks(pdf)

-            # Resume summary
-            summary = resume_analyzer.resume_summary(query_with_chunks=pdf_chunks)
-            result_summary = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=summary)
-
-            weakness = resume_analyzer.resume_weakness(query_with_chunks=result_summary)
-            result_weakness = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=weakness)
-
-            st.subheader('Weakness:')
-            st.write(result_weakness)
-
-    except Exception as e:
-        add_vertical_space(2)
-        st.markdown(f'<h5 style="text-align: center;color: orange;">{e}</h5>', unsafe_allow_html=True)
+    resume_analyzer.resume_weakness()


elif option == 'Job Titles':

-    pdf = st.file_uploader(label='', type='pdf')
-    openai_api_key = st.text_input(label='OpenAI API Key', type='password')

-    try:
-        if pdf is not None and openai_api_key is not None:
-            pdf_chunks = resume_analyzer.pdf_to_chunks(pdf)
-
-            # Resume summary
-            summary = resume_analyzer.resume_summary(query_with_chunks=pdf_chunks)
-            result_summary = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=summary)
-
-            job_suggestion = resume_analyzer.job_title_suggestion(query_with_chunks=result_summary)
-            result_suggestion = resume_analyzer.openai(openai_api_key=openai_api_key, chunks=pdf_chunks, analyze=job_suggestion)
-
-            st.subheader('Suggestion: ')
-            st.write(result_suggestion)
-
-    except Exception as e:
-        add_vertical_space(2)
-        st.markdown(f'<h5 style="text-align: center;color: orange;">{e}</h5>', unsafe_allow_html=True)
+    resume_analyzer.job_title_suggestion()


elif option == 'Linkedin Jobs':
-
-    add_vertical_space(2)
+
    linkedin_scraper.main()


+
elif option == 'Exit':

-    add_vertical_space(3)
    col1, col2, col3 = st.columns([0.3,0.4,0.3])
    with col2:
        st.success('Thank you for your time. Exiting the application')
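The net effect of this last hunk: roughly a hundred lines of per-tab upload/key/try-except plumbing collapse into one method call per sidebar option, since each `resume_*` method now owns its own form and error handling. The `icons` context line belongs to a `streamlit_option_menu.option_menu` call just above the hunk; a minimal sketch of how such a menu is wired, where the `menu_title` value and omitted styling arguments are assumptions and only the options and icons appear in the diff:

    import streamlit as st
    from streamlit_option_menu import option_menu
    from streamlit_extras.add_vertical_space import add_vertical_space

    with st.sidebar:
        add_vertical_space(3)
        option = option_menu(menu_title='',
                             options=['Summary', 'Strength', 'Weakness',
                                      'Job Titles', 'Linkedin Jobs', 'Exit'],
                             icons=['house-fill', 'database-fill', 'pass-fill',
                                    'list-ul', 'linkedin', 'sign-turn-right-fill'])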