bsiddhharth committed · Commit ecb7130 · 1 Parent(s): 09a1406
Updated cv_analyzer_search.py with new features or bug fixes

cv_analyzer_search.py CHANGED (+196 -57)
@@ -19,6 +19,11 @@ import os
 import logging
 
 
+def make_clickable_link(link):
+    return f'<a href="{link}" target="_blank">{link}</a>'
+
+
+
 os.environ['GROQ_API_KEY'] = os.getenv("GROQ_API_KEY")
 groq_api_key = os.getenv("GROQ_API_KEY")
 
@@ -87,15 +92,15 @@ class JobSuggestionEngine:
 
             logger.debug(f"Calling Groq API with prompt: {prompt[:100]}...")  # start of api call
 
-            #
+            # API call to the Groq client for chat completions
             chat_completion = self.client.chat.completions.create(
                 messages=[
                     {"role": "system", "content": "You are a career advisor generating job suggestions based on resume details."},
                     {"role": "user", "content": prompt}
                 ],
-                model="llama3-8b-8192",
-                temperature=0.7,
-                max_tokens=1024,
+                model="llama3-8b-8192",
+                temperature=0.7,
+                max_tokens=1024,
                 top_p=1,
                 stop=None,
                 stream=False
@@ -107,7 +112,7 @@ class JobSuggestionEngine:
 
             logger.info(f"Job suggestions generated: {len(suggestions_data.get('job_suggestions', []))} found")
 
-            # Return job suggestions,
+            # Return job suggestions, if not found -> empty list
             return suggestions_data.get('job_suggestions', [])
 
         except Exception as e:
@@ -117,8 +122,34 @@ class JobSuggestionEngine:
 
 def Job_assistant():
     st.title("🔍 Job Suggestion & Search Assistant")
+
+    # Initialize session state for resume analysis tab
+    if 'uploaded_resume' not in st.session_state:
+        st.session_state.uploaded_resume = None
+    if 'resume_data' not in st.session_state:
+        st.session_state.resume_data = None
+    if 'job_suggestions' not in st.session_state:
+        st.session_state.job_suggestions = []
+    if 'improvement_suggestions' not in st.session_state:
+        st.session_state.improvement_suggestions = {}
 
-    #
+    # Initialize session state for job search tab
+    if 'site_name' not in st.session_state:
+        st.session_state.site_name = ["indeed", "glassdoor"]
+    if 'search_term' not in st.session_state:
+        st.session_state.search_term = "software engineer"
+    if 'location' not in st.session_state:
+        st.session_state.location = "San Francisco, CA"
+    if 'results_wanted' not in st.session_state:
+        st.session_state.results_wanted = 20
+    if 'hours_old' not in st.session_state:
+        st.session_state.hours_old = 72
+    if 'country_indeed' not in st.session_state:
+        st.session_state.country_indeed = "USA"
+    if 'job_search_results' not in st.session_state:
+        st.session_state.job_search_results = pd.DataFrame()
+
+    # Tabs for functionalities
     tab1, tab2 = st.tabs(["Resume Analysis", "Direct Job Search"])
 
 
@@ -143,6 +174,7 @@ def Job_assistant():
         # st.stop()
 
         if uploaded_resume:
+            st.session_state.uploaded_resume = uploaded_resume
             # Process Resume
             with st.spinner("Analyzing Resume..."):
                 try:
@@ -158,6 +190,8 @@ def Job_assistant():
                         logger.error("No candidates extracted from resume")
                         st.stop()
 
+                    st.session_state.resume_data = candidates[0]
+
                     # Display extracted candidate information
                     st.subheader("Resume Analysis")
                     display_candidates_info(candidates)
@@ -170,19 +204,23 @@ def Job_assistant():
                     st.stop()
 
             # Initialize Job Suggestion Engine
-
-
-
-
-
-
-
-
-
-
-
-
-
+            if st.session_state.resume_data:
+                suggestion_engine = JobSuggestionEngine()
+                logger.info("Job_Suggestion_Engine initialized")
+
+                # Generate Job Suggestions
+                job_suggestions = suggestion_engine.generate_job_suggestions(resume_data)
+                logger.info(f"Generated {len(job_suggestions)} job suggestions")
+
+                st.session_state.job_suggestions = job_suggestions
+
+                # Display Job Suggestions
+                st.header("🎯 Job Suggestions")
+                # for suggestion in job_suggestions:
+                for suggestion in st.session_state.job_suggestions:
+                    with st.expander(f"{suggestion.get('role', 'Unnamed Role')}"):
+                        st.write(f"**Description:** {suggestion.get('description', 'No description')}")
+                        st.write(f"**Suitability:** {suggestion.get('suitability_reason', 'Not specified')}")
 
 
             try:
@@ -190,14 +228,15 @@ def Job_assistant():
                 resume_text = process_file(uploaded_resume)
                 logger.info("Resume text extracted again for improvement suggestions")
 
-                # Initialize Improvement Engine
+                # Initialize Resume Improvement Engine
                 improvement_engine = ResumeImprovementEngine()
 
                 # Generate Improvement Suggestions
                 improvement_suggestions = improvement_engine.generate_resume_improvement_suggestions(resume_text)
                 logger.info("Resume improvement suggestions generated")
+                st.session_state.improvement_suggestions = improvement_suggestions
 
-                # Display
+                # Display Suggestions
                 st.subheader("📝 Comprehensive Resume Analysis")
 
                 # Overall Assessment
@@ -271,39 +310,51 @@ def Job_assistant():
         st.header("🔍 Direct Job Search")
 
         # Job Search Parameters
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+        with st.form(key='job_search_form'):
+            # Job Search Parameters
+            col1, col2, col3, col4 = st.columns(4)
+
+            with col1:
+                site_name = st.multiselect(
+                    "Select Job Sites",
+                    ["indeed", "linkedin", "zip_recruiter", "glassdoor", "google"],
+                    default=st.session_state.site_name
+                )
+
+            with col2:
+                search_term = st.text_input("Search Term", st.session_state.search_term)
+
+            with col3:
+                location = st.text_input("Location", st.session_state.location)
+
+            with col4:
+                results_wanted = st.number_input("Number of Results", min_value=1, max_value=100, value=st.session_state.results_wanted)
+
+            # Additional parameters
+            col5, col6 = st.columns(2)
+
+            with col5:
+                hours_old = st.number_input("Jobs Posted Within (hours)", min_value=1, max_value=168, value=st.session_state.hours_old)
+
+            with col6:
+                country_indeed = st.text_input("Country (for Indeed)", st.session_state.country_indeed)
+
+            # Submit button inside the form
+            submit_button = st.form_submit_button("Search Jobs")
+
+        # Only run search when form is submitted
+        if submit_button:
+            st.session_state.site_name = site_name
+            st.session_state.search_term = search_term
+            st.session_state.location = location
+            st.session_state.results_wanted = results_wanted
+            st.session_state.hours_old = hours_old
+            st.session_state.country_indeed = country_indeed
+
             with st.spinner("Searching Jobs..."):
-                # Perform job search
                 try:
-
+                    # Your existing job search code here
                    jobs = scrape_jobs(
                        site_name=site_name,
                        search_term=search_term,
@@ -313,16 +364,15 @@ def Job_assistant():
                         hours_old=hours_old,
                         country_indeed=country_indeed,
                     )
+                    st.session_state.job_search_results = jobs
 
                     if len(jobs) > 0:
                         st.success(f"Found {len(jobs)} jobs")
 
                         jobs_filtered = jobs[['site', 'job_url', 'title', 'company', 'location', 'date_posted']]
-
-
-
-
-                        # Option to download jobs
+                        jobs_filtered['job_url'] = jobs_filtered['job_url'].apply(make_clickable_link)
+                        st.write(jobs_filtered.to_html(escape=False), unsafe_allow_html=True)
+
                         csv_file = jobs.to_csv(index=False)
                         st.download_button(
                             label="Download Jobs as CSV",
@@ -336,6 +386,95 @@ def Job_assistant():
                 except Exception as e:
                     st.error(f"Job Search Error: {e}")
                     logger.error(f"Job Search Error: {e}")
+        # col1, col2, col3, col4 = st.columns(4)
+
+        # with col1:
+        #     site_name = st.multiselect(
+        #         "Select Job Sites",
+        #         ["indeed", "linkedin", "zip_recruiter", "glassdoor", "google"],
+        #         default=st.session_state.site_name
+        #         # default=["indeed", "glassdoor"]
+        #     )
+        # st.session_state.site_name = site_name
+
+        # with col2:
+        #     search_term = st.text_input("Search Term", st.session_state.search_term)
+        #     st.session_state.search_term = search_term
+
+        # with col3:
+        #     location = st.text_input("Location", st.session_state.location)
+        #     st.session_state.location = location
+
+
+        # with col4:
+        #     results_wanted = st.number_input("Number of Results", min_value=1, max_value=100, value=st.session_state.results_wanted)
+        #     st.session_state.results_wanted = results_wanted
+
+        # # Additional parameters
+        # col5, col6 = st.columns(2)
+
+        # with col5:
+        #     hours_old = st.number_input("Jobs Posted Within (hours)", min_value=1, max_value=168, value=st.session_state.hours_old)
+        #     st.session_state.hours_old = hours_old
+
+        # with col6:
+        #     country_indeed = st.text_input("Country (for Indeed)", st.session_state.country_indeed)
+        #     st.session_state.country_indeed = country_indeed
+
+        # search_button_clicked = st.button("Search Jobs")
+
+        # # Search Button
+        # # if st.button("Search Jobs"):
+        # if search_button_clicked:
+        #     with st.spinner("Searching Jobs..."):
+        #         # Perform job search
+        #         try:
+        #             logger.info(f"Performing job search with {search_term} in {location}")
+        #             # jobs = scrape_jobs(
+        #             #     site_name=site_name,
+        #             #     search_term=search_term,
+        #             #     google_search_term=f"{search_term} jobs near {location}",
+        #             #     location=location,
+        #             #     results_wanted=results_wanted,
+        #             #     hours_old=hours_old,
+        #             #     country_indeed=country_indeed,
+        #             # )
+        #             jobs = scrape_jobs(
+        #                 site_name=st.session_state.site_name,
+        #                 search_term=st.session_state.search_term,
+        #                 google_search_term=f"{st.session_state.search_term} jobs near {st.session_state.location}",
+        #                 location=st.session_state.location,
+        #                 results_wanted=st.session_state.results_wanted,
+        #                 hours_old=st.session_state.hours_old,
+        #                 country_indeed=st.session_state.country_indeed,
+        #             )
+        #             st.session_state.job_search_results = jobs
+
+        #             if len(jobs) > 0:
+        #                 st.success(f"Found {len(jobs)} jobs")
+
+        #                 jobs_filtered = jobs[['site', 'job_url', 'title', 'company', 'location', 'date_posted']]
+        #                 # Display job data in a table
+        #                 # st.dataframe(jobs)
+        #                 jobs_filtered['job_url'] = jobs_filtered['job_url'].apply(make_clickable_link)
+        #                 st.write(jobs_filtered.to_html(escape=False), unsafe_allow_html=True)
+
+        #                 # st.dataframe(jobs_filtered)
+
+        #                 # Option to download jobs
+        #                 csv_file = jobs.to_csv(index=False)
+        #                 st.download_button(
+        #                     label="Download Jobs as CSV",
+        #                     data=csv_file,
+        #                     file_name='job_search_results.csv',
+        #                     mime='text/csv'
+        #                 )
+        #             else:
+        #                 st.warning("No jobs found")
+
+        #         except Exception as e:
+        #             st.error(f"Job Search Error: {e}")
+        #             logger.error(f"Job Search Error: {e}")
 
 
 
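The clickable-link table is the commit's main UI change: `make_clickable_link` wraps each URL in an anchor tag, and the filtered DataFrame is rendered as raw HTML. A minimal standalone sketch of that pattern (the sample data is hypothetical; streamlit and pandas assumed):

```python
import pandas as pd
import streamlit as st

def make_clickable_link(link):
    # Wrap a raw URL in an anchor tag so it renders as a clickable link.
    return f'<a href="{link}" target="_blank">{link}</a>'

# Hypothetical stand-in for the scraped jobs DataFrame.
jobs_filtered = pd.DataFrame({
    "title": ["Software Engineer", "Data Analyst"],
    "job_url": ["https://example.com/job/1", "https://example.com/job/2"],
})
jobs_filtered["job_url"] = jobs_filtered["job_url"].apply(make_clickable_link)

# escape=False keeps the <a> tags intact in the generated HTML table,
# and unsafe_allow_html=True lets Streamlit render that HTML as-is.
st.write(jobs_filtered.to_html(escape=False), unsafe_allow_html=True)
```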
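The block of `st.session_state` defaults added at the top of `Job_assistant()` addresses Streamlit's rerun model: the script re-executes from the top on every interaction, so state must be seeded once and then carried across runs. A minimal sketch of the pattern, using one assumed key:

```python
import streamlit as st

# Defaults are written only when the key is absent, so values assigned on
# later runs survive Streamlit's top-to-bottom re-execution of the script.
if "search_term" not in st.session_state:
    st.session_state.search_term = "software engineer"

# The stored value seeds the widget on every rerun.
search_term = st.text_input("Search Term", st.session_state.search_term)

if st.button("Save"):
    st.session_state.search_term = search_term
st.caption(f"Saved search term: {st.session_state.search_term}")
```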
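Wrapping the search inputs in `st.form` is what defers the scrape until the user explicitly submits: widgets inside a form do not trigger reruns individually. A minimal sketch with hypothetical field names:

```python
import streamlit as st

# Widget interactions inside a form are batched; the script reruns only when
# the submit button is pressed, so an expensive search fires once per submit.
with st.form(key="job_search_form"):
    search_term = st.text_input("Search Term", "software engineer")
    location = st.text_input("Location", "San Francisco, CA")
    submitted = st.form_submit_button("Search Jobs")

if submitted:
    st.write(f"Searching for '{search_term}' in {location}...")
```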