MandarBhalerao committed
Commit: e5db578
Parent(s): 3a95020

Initial commit

Files changed:
- .gitignore (+162 -0)
- app/chains.py (+94 -0)
- app/generating_prompts_from_resume.py (+65 -0)
- app/main.py (+46 -0)
- app/utils.py (+26 -0)
- email_generator.ipynb (+1036 -0)
- experimenting.ipynb (+712 -0)
- final_mandar.ipynb (+1472 -0)
- requirements.txt (+9 -0)
- tutorial_groq.ipynb (+56 -0)
.gitignore
ADDED
@@ -0,0 +1,162 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/
app/chains.py
ADDED
@@ -0,0 +1,94 @@
import os
from langchain_groq import ChatGroq
from langchain_core.prompts import PromptTemplate
from langchain_core.output_parsers import JsonOutputParser
from langchain_core.exceptions import OutputParserException
from dotenv import load_dotenv


# Keep the API key in a .env file in the project root.
load_dotenv()  # finds the .env file and sets its entries as environment variables

# print(os.getenv("GROQ_API_KEY"))  # just for testing

class Chain:
    def __init__(self):
        self.llm = ChatGroq(temperature=0, groq_api_key=os.getenv("GROQ_API_KEY"), model_name="llama-3.1-70b-versatile")

    # Extract the job postings from the scraped text and parse them into JSON.
    def extract_jobs(self, cleaned_text):
        prompt_extract = PromptTemplate.from_template(
            """
            ### SCRAPED TEXT FROM WEBSITE:
            {page_data}
            ### INSTRUCTION:
            The scraped text is from the careers page of a website.
            Your job is to extract the job postings and return them in JSON format containing the following keys: `role`, `experience`, `skills` and `description`.
            Only return the valid JSON.
            ### VALID JSON (NO PREAMBLE):
            """
        )
        chain_extract = prompt_extract | self.llm
        res = chain_extract.invoke(input={"page_data": cleaned_text})
        try:
            json_parser = JsonOutputParser()
            res = json_parser.parse(res.content)
            # Check if the result is a list and extract the first dictionary
            # if isinstance(json_res, list):
            #     json_res = json_res[0]
        except OutputParserException:
            raise OutputParserException("Context too big. Unable to parse jobs.")
        return res if isinstance(res, list) else [res]

    def summarize_pdf(self, pdf_data):
        prompt_extract = PromptTemplate.from_template(
            """
            ### PDF DATA OBTAINED FROM RESUME:
            {pdf_data}
            ### INSTRUCTION:
            The data is from the resume of a person.
            Your job is to extract all the details of this person and summarize them in 200 words, covering name, education, experience, projects and skills.
            ### (NO PREAMBLE):
            """
        )
        chain_extract = prompt_extract | self.llm  # forms a LangChain chain: the prompt is piped into the LLM
        res2 = chain_extract.invoke(input={'pdf_data': pdf_data})
        # print(res2.content)
        summary = res2.content
        return summary

    def write_mail(self, job_description, summary):
        prompt_email = PromptTemplate.from_template(
            """
            ### JOB DESCRIPTION:
            This is a job description

            {job_description}

            ### INSTRUCTION:
            These are the person's details.
            {summary}
            Consider yourself as this person.

            Introduce yourself in an engaging way, using your name and current designation from the details above.

            Try to find things in the job description that are similar to your details and mention them.
            Do not mention anything which is not present in the details.

            Your job is to write a cold email of about 250 words to the hiring manager regarding the job mentioned above, describing your capability
            to fulfil their needs. The cold email must be engaging to read.
            End the email with your name and the place where you are currently working or studying.
            Do not provide a preamble.
            ### EMAIL (NO PREAMBLE):

            """
        )
        chain_email = prompt_email | self.llm
        res = chain_email.invoke({"job_description": str(job_description), "summary": summary})
        return res.content

# if __name__ == "__main__":
#     print(os.getenv("GROQ_API_KEY"))
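Taken together, the three methods are meant to be called in sequence: cleaned job-page text goes through extract_jobs, the resume text through summarize_pdf, and both results into write_mail. A minimal usage sketch under those assumptions (the sample strings and the idea of driving Chain from a standalone script are illustrative, not part of the commit):

# Minimal sketch of how Chain is typically driven (illustrative values only).
# Assumes GROQ_API_KEY is present in a .env file, as chains.py expects.
from chains import Chain

chain = Chain()

cleaned_text = "Data Scientist, Bengaluru. Skills: Python, SQL, ML frameworks ..."  # cleaned scraped job page (assumed)
resume_text = "Name: Mandar Bhalerao Education: ... Experience: ... Skills: ..."    # text pulled from a resume PDF (assumed)

jobs = chain.extract_jobs(cleaned_text)     # list of job dicts: role, experience, skills, description
summary = chain.summarize_pdf(resume_text)  # ~200-word summary of the resume
email = chain.write_mail(jobs[0], summary)  # cold email tailored to the first job
print(email)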
app/generating_prompts_from_resume.py
ADDED
@@ -0,0 +1,65 @@
import pdfplumber
import re

def extract_text_from_pdf(pdf_path):
    with pdfplumber.open(pdf_path) as pdf:
        # extract_text() can return None for empty pages, so substitute an empty string
        pages = [page.extract_text() or "" for page in pdf.pages]
        all_text = "\n".join(pages) if pages else ""
    print(all_text)
    return all_text

def extract_resume_details(resume_text):
    # Example regex patterns to extract different parts of the resume
    name_match = re.search(r"Name:\s*(.*)", resume_text)
    name = name_match.group(1).strip() if name_match else "Name not found"

    education_match = re.search(r"Education:(.*?)(?=\nExperience:)", resume_text, re.DOTALL)
    education = education_match.group(1).strip() if education_match else "Education details not found"

    experience_match = re.search(r"Experience:(.*?)(?=\nProjects:)", resume_text, re.DOTALL)
    experience = experience_match.group(1).strip() if experience_match else "Experience details not found"

    projects_match = re.search(r"Projects:(.*?)(?=\nSkills:)", resume_text, re.DOTALL)
    projects = projects_match.group(1).strip() if projects_match else "Project details not found"

    skills_match = re.search(r"Skills:(.*)", resume_text)
    skills = skills_match.group(1).strip() if skills_match else "Skills details not found"

    achievements_match = re.search(r"Achievements:(.*)", resume_text)
    achievements = achievements_match.group(1).strip() if achievements_match else "Achievements not found"

    return {
        "name": name,
        "education": education,
        "experience": experience,
        "projects": projects,
        "skills": skills,
        "achievements": achievements
    }

def generate_cold_email(details):
    return f"""
You are {details['name']}, a graduate from {details['education']}. Your professional experience includes {details['experience']}. You have led projects such as {details['projects']} and are skilled in {details['skills']}. You have also achieved {details['achievements']}.

Your task is to write a cold email to a potential employer or client, showcasing your skills and experiences detailed above. Mention your hands-on experience with technologies and how you can contribute to solving real-world problems.

Remember, you are {details['name']}, ready to make a significant impact in your new role.
"""

def process_resume(pdf_path):
    text = extract_text_from_pdf(pdf_path)
    details = extract_resume_details(text)
    email_prompt = generate_cold_email(details)

    output_path = "Cold_Email_Prompt.txt"
    with open(output_path, "w") as file:
        file.write(email_prompt)

    return output_path

# Example usage
# pdf_path = "C:\Users\Admin\Downloads\Mandar_Bhalerao_IISc.pdf"  # backslashes would need escaping; use forward slashes instead
pdf_path = "C:/Users/Admin/Downloads/Mandar_Bhalerao_IISc.pdf"  # use the actual path to the PDF file

output_path = process_resume(pdf_path)
print(f"Cold email prompt saved at: {output_path}")
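The regexes above only work if the extracted PDF text carries literal "Name:", "Education:", "Experience:", "Projects:", "Skills:" and "Achievements:" headings, with Education, Experience and Projects each terminated by the next heading. A small sketch with made-up resume text, only to illustrate that assumed layout:

# Illustrative only: the kind of layout extract_resume_details expects.
sample_resume_text = """Name: Mandar Bhalerao
Education: MTech, Computer Science and Automation, IISc Bangalore
Experience: Intern at NeuroPixel.AI; Intern at Western Union
Projects: Gurgaon Real Estate Price Prediction; Movie Recommendation System
Skills: Python, C++, Machine Learning
Achievements: Finalist in a national hackathon"""

details = extract_resume_details(sample_resume_text)
print(details["name"])    # -> Mandar Bhalerao
print(details["skills"])  # -> Python, C++, Machine Learning  (the Skills: pattern matches only to the end of its line)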
app/main.py
ADDED
@@ -0,0 +1,46 @@
import streamlit as st
from langchain_community.document_loaders import WebBaseLoader

from chains import Chain
# from portfolio import Portfolio
from utils import clean_text, extract_text_from_pdf


def create_streamlit_app(llm, clean_text):
    st.title("📧 Welcome to Cold E-Mail Generator")

    # PDF upload section
    uploaded_file = st.file_uploader("Upload your resume as PDF", type=["pdf"])
    pdf_text = extract_text_from_pdf(uploaded_file)
    # if pdf_text:
    #     st.text_area("Extracted Text", value=pdf_text, height=300)

    url_input = st.text_input("Enter the URL of Job Posting:", value="https://careers.myntra.com/job-detail/?id=7431200002")
    submit_button = st.button("Generate E-mail")

    if submit_button:
        try:
            loader = WebBaseLoader([url_input])
            data = clean_text(loader.load().pop().page_content)  # clean any unnecessary garbage text from the scraped page
            jobs = llm.extract_jobs(data)  # create JSON objects for the job postings
            for job in jobs:  # in case one web page lists multiple jobs
                # skills = job.get('skills', [])
                summarized_text = llm.summarize_pdf(pdf_text)
                # st.text_area(summarized_text)
                email = llm.write_mail(job, summarized_text)  # write the email
                # st.code(email, language='markdown')
                st.text_area("Email is as follows", value=email, height=500)

            # st.code('hello')
        except Exception as e:
            st.error(f"An Error Occurred: {e}")


if __name__ == "__main__":
    chain = Chain()
    # portfolio = Portfolio()
    st.set_page_config(layout="wide", page_title="Cold Email Generator", page_icon="📧")
    create_streamlit_app(chain, clean_text)
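Assuming the packages listed in requirements.txt are installed and a .env file with GROQ_API_KEY sits next to the code (as chains.py expects), the app would typically be launched from the app/ directory with "streamlit run main.py"; the exact invocation is an assumption based on this layout rather than something stated in the commit.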
app/utils.py
ADDED
@@ -0,0 +1,26 @@
import re
import pdfplumber


# Remove unwanted characters and clean the scraped text.
def clean_text(text):
    # Remove HTML tags
    text = re.sub(r'<[^>]*?>', '', text)
    # Remove URLs
    text = re.sub(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', '', text)
    # Remove special characters
    text = re.sub(r'[^a-zA-Z0-9 ]', '', text)
    # Replace multiple spaces with a single space
    text = re.sub(r'\s{2,}', ' ', text)
    # Trim leading and trailing whitespace
    text = text.strip()
    # Remove extra whitespace
    text = ' '.join(text.split())
    return text

def extract_text_from_pdf(uploaded_file):
    if uploaded_file is not None:
        with pdfplumber.open(uploaded_file) as pdf:
            # extract_text() can return None for empty pages, so substitute an empty string
            pages = [page.extract_text() or "" for page in pdf.pages]
            return "\n".join(pages) if pages else ""
    return ""
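As a quick illustration of what clean_text does to scraped page content (the sample string is an assumption, not taken from the repository):

# Illustrative only: exercises the regexes in clean_text above.
sample = "<div>Data   Scientist @ Myntra!</div> see https://careers.myntra.com/job-detail/?id=7431200002"
print(clean_text(sample))
# HTML tags, the URL and non-alphanumeric characters are stripped and whitespace is collapsed:
# -> Data Scientist Myntra see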
email_generator.ipynb
ADDED
@@ -0,0 +1,1036 @@
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 1,
|
6 |
+
"id": "0eccd20e",
|
7 |
+
"metadata": {},
|
8 |
+
"outputs": [],
|
9 |
+
"source": [
|
10 |
+
"from langchain_groq import ChatGroq"
|
11 |
+
]
|
12 |
+
},
|
13 |
+
{
|
14 |
+
"cell_type": "code",
|
15 |
+
"execution_count": 2,
|
16 |
+
"id": "c16ff50e",
|
17 |
+
"metadata": {},
|
18 |
+
"outputs": [
|
19 |
+
{
|
20 |
+
"name": "stdout",
|
21 |
+
"output_type": "stream",
|
22 |
+
"text": [
|
23 |
+
"The first person to land on the moon was Neil Armstrong. He stepped onto the lunar surface on July 20, 1969, as part of the Apollo 11 mission.\n"
|
24 |
+
]
|
25 |
+
}
|
26 |
+
],
|
27 |
+
"source": [
|
28 |
+
"llm = ChatGroq(\n",
|
29 |
+
" temperature=0, \n",
|
30 |
+
" groq_api_key='your_api_key_here', \n",
|
31 |
+
" model_name=\"llama-3.1-70b-versatile\"\n",
|
32 |
+
")\n",
|
33 |
+
"# checking the response, and it is very fast\n",
|
34 |
+
"response = llm.invoke(\"The first person to land on moon was ...\")\n",
|
35 |
+
"print(response.content)"
|
36 |
+
]
|
37 |
+
},
|
38 |
+
{
|
39 |
+
"cell_type": "code",
|
40 |
+
"execution_count": 3,
|
41 |
+
"id": "66815076-34c6-4588-bcfc-853ad226d1a9",
|
42 |
+
"metadata": {},
|
43 |
+
"outputs": [],
|
44 |
+
"source": [
|
45 |
+
"# we need to setup a vector database, and we going to use chromadb\n",
|
46 |
+
"# there are other solutions too, but chromadb is open source and very light weight"
|
47 |
+
]
|
48 |
+
},
|
49 |
+
{
|
50 |
+
"cell_type": "code",
|
51 |
+
"execution_count": 4,
|
52 |
+
"id": "90d33612",
|
53 |
+
"metadata": {},
|
54 |
+
"outputs": [
|
55 |
+
{
|
56 |
+
"name": "stderr",
|
57 |
+
"output_type": "stream",
|
58 |
+
"text": [
|
59 |
+
"USER_AGENT environment variable not set, consider setting it to identify your requests.\n"
|
60 |
+
]
|
61 |
+
},
|
62 |
+
{
|
63 |
+
"name": "stdout",
|
64 |
+
"output_type": "stream",
|
65 |
+
"text": [
|
66 |
+
"\n",
|
67 |
+
"\n",
|
68 |
+
"\n",
|
69 |
+
"\n",
|
70 |
+
"\n",
|
71 |
+
"\n",
|
72 |
+
"\n",
|
73 |
+
"\n",
|
74 |
+
"\n",
|
75 |
+
"\n",
|
76 |
+
"\n",
|
77 |
+
"\n",
|
78 |
+
"\n",
|
79 |
+
"\n",
|
80 |
+
"\n",
|
81 |
+
"\n",
|
82 |
+
"\n",
|
83 |
+
"\n",
|
84 |
+
"\n",
|
85 |
+
"\n",
|
86 |
+
"\n",
|
87 |
+
"\n",
|
88 |
+
"\n",
|
89 |
+
"\n",
|
90 |
+
"\n",
|
91 |
+
"\n",
|
92 |
+
"\n",
|
93 |
+
"\n",
|
94 |
+
"\n",
|
95 |
+
"\n",
|
96 |
+
"\n",
|
97 |
+
"\n",
|
98 |
+
"\n",
|
99 |
+
"Data Scientist\n",
|
100 |
+
"\n",
|
101 |
+
"\n",
|
102 |
+
"\n",
|
103 |
+
"\n",
|
104 |
+
"\n",
|
105 |
+
"\n",
|
106 |
+
"\n",
|
107 |
+
"\n",
|
108 |
+
"\n",
|
109 |
+
"\n",
|
110 |
+
"\n",
|
111 |
+
"\n",
|
112 |
+
"\n",
|
113 |
+
"\n",
|
114 |
+
"\n",
|
115 |
+
"\n",
|
116 |
+
"\n",
|
117 |
+
"\n",
|
118 |
+
"\n",
|
119 |
+
"\n",
|
120 |
+
"\n",
|
121 |
+
"\n",
|
122 |
+
"\n",
|
123 |
+
"\n",
|
124 |
+
"\n",
|
125 |
+
"About\n",
|
126 |
+
"Alum\n",
|
127 |
+
"Inclusion\n",
|
128 |
+
"Careers\n",
|
129 |
+
"Culture\n",
|
130 |
+
"Blog\n",
|
131 |
+
"Tech\n",
|
132 |
+
"\n",
|
133 |
+
"\n",
|
134 |
+
"\n",
|
135 |
+
"\n",
|
136 |
+
"\n",
|
137 |
+
"\n",
|
138 |
+
"\n",
|
139 |
+
"\n",
|
140 |
+
"\n",
|
141 |
+
"\n",
|
142 |
+
"Data Scientist\n",
|
143 |
+
"Bengaluru\n",
|
144 |
+
"\n",
|
145 |
+
"\n",
|
146 |
+
"\n",
|
147 |
+
"\n",
|
148 |
+
"\n",
|
149 |
+
"\n",
|
150 |
+
"\n",
|
151 |
+
"\n",
|
152 |
+
"Share\n",
|
153 |
+
"\n",
|
154 |
+
"\n",
|
155 |
+
"\n",
|
156 |
+
"\n",
|
157 |
+
"\n",
|
158 |
+
"\n",
|
159 |
+
"\n",
|
160 |
+
"\n",
|
161 |
+
"\n",
|
162 |
+
"Apply\n",
|
163 |
+
"\n",
|
164 |
+
"\n",
|
165 |
+
"\n",
|
166 |
+
"About Team\n",
|
167 |
+
"Myntra Data Science team delivers a large number of data science solutions for the company which are deployed at various customer touch points every quarter. The models create significant revenue and customer experience impact. The models involve real-time, near-real-time and offline solutions with varying latency requirements. The models are built using massive datasets. You will have the opportunity to be part of a rapidly growing organization and gain exposure to all the parts of a comprehensive ecommerce platform. You’ll also get to learn the intricacies of building models that serve millions of requests per second at sub second latency. \n",
|
168 |
+
"The team takes pride in deploying solutions that not only leverage state of the art machine learning models like graph neural networks, diffusion models, transformers, representation learning, optimization methods and bayesian modeling but also contribute to research literature with multiple peer-reviewed research papers.\n",
|
169 |
+
"Roles and Responsibilities\n",
|
170 |
+
"\n",
|
171 |
+
"Design, develop and deploy machine learning models,algorithms and systems to solve complex business problems for Myntra Recsys, Search, Vision, SCM, Pricing, Forecasting, Trend and Virality prediction, Gen AI and other areas\n",
|
172 |
+
"Theoretical understanding and practise of machine learning and expertise in one or more of the topics, such as, NLP, Computer Vision, recommender systems and Optimisation. \n",
|
173 |
+
"Implement robust and reliable software solutions for model deployment.\n",
|
174 |
+
"Support the team in maintaining machine learning pipelines, contributing to tasks like data cleaning, feature extraction and basic model training.\n",
|
175 |
+
"Participate in monitoring the performance of machine learning models, gaining experience in using statistical methods for evaluation.\n",
|
176 |
+
"Working with the Data Platforms teams for understanding and collecting the data.\n",
|
177 |
+
"Conduct performance testing, troubleshooting and tuning as required.\n",
|
178 |
+
"Stay current with the latest research and technology and communicate your knowledge throughout the enterprise.\n",
|
179 |
+
"\n",
|
180 |
+
"Qualifications & Experience\n",
|
181 |
+
"\n",
|
182 |
+
"Master’s/PhD in Computer Science, Mathematics, Statistics/related fields ‘or’ 1+ years of relevant industry experience with a Bachelor’s degree.\n",
|
183 |
+
"Proficiency in Python or one other high-level programming language.\n",
|
184 |
+
"Theoretical understanding of statistical models such as regression, clustering and ML algorithms such as decision trees, neural networks, etc.\n",
|
185 |
+
"Strong written and verbal communication skills\n",
|
186 |
+
"Intellectual curiosity and enthusiastic about continuous learning\n",
|
187 |
+
"Experience developing machine learning models in Python, or equivalent programming language.\n",
|
188 |
+
"Basic familiarity with machine learning frameworks like TensorFlow, PyTorch, or scikit-learn.\n",
|
189 |
+
"Introductory understanding of statistics as it applies to machine learning.\n",
|
190 |
+
"Ability to manage and prioritize your workload and support his/her manager.\n",
|
191 |
+
"Experience with SQL and/or NoSQL databases.\n",
|
192 |
+
"If you are an exceptional candidate, write in. We are happy to hire you even if you don't have the certified qualifications.\n",
|
193 |
+
"\n",
|
194 |
+
"Nice to Have:\n",
|
195 |
+
"\n",
|
196 |
+
"Publications or presentations in recognized Machine Learning and Data Science journals/conferences.\n",
|
197 |
+
"Experience with ML orchestration tools (Airflow, Kubeflow or MLFlow)\n",
|
198 |
+
"Exposure to GenAI models.\n",
|
199 |
+
"\n",
|
200 |
+
" \n",
|
201 |
+
"\n",
|
202 |
+
"\n",
|
203 |
+
"\n",
|
204 |
+
"\n",
|
205 |
+
"\n",
|
206 |
+
"\n",
|
207 |
+
"\n",
|
208 |
+
"\n",
|
209 |
+
"×\n",
|
210 |
+
"\n",
|
211 |
+
"\n",
|
212 |
+
"\n",
|
213 |
+
"\n",
|
214 |
+
"Apply now\n",
|
215 |
+
"\n",
|
216 |
+
"\n",
|
217 |
+
"\n",
|
218 |
+
"\n",
|
219 |
+
"Name *\n",
|
220 |
+
"\n",
|
221 |
+
"\n",
|
222 |
+
"\n",
|
223 |
+
"\n",
|
224 |
+
"\n",
|
225 |
+
"Last Name *\n",
|
226 |
+
"\n",
|
227 |
+
"\n",
|
228 |
+
"\n",
|
229 |
+
"\n",
|
230 |
+
"\n",
|
231 |
+
"Your Email *\n",
|
232 |
+
"\n",
|
233 |
+
"\n",
|
234 |
+
"\n",
|
235 |
+
"\n",
|
236 |
+
"\n",
|
237 |
+
"Phone *\n",
|
238 |
+
"\n",
|
239 |
+
"\n",
|
240 |
+
"\n",
|
241 |
+
"\n",
|
242 |
+
"\n",
|
243 |
+
"Your current location *\n",
|
244 |
+
"\n",
|
245 |
+
"\n",
|
246 |
+
"\n",
|
247 |
+
"\n",
|
248 |
+
"\n",
|
249 |
+
"Resume/CV *\n",
|
250 |
+
"\n",
|
251 |
+
"\n",
|
252 |
+
"Attach\n",
|
253 |
+
"\n",
|
254 |
+
"×\n",
|
255 |
+
"\n",
|
256 |
+
"\n",
|
257 |
+
"\n",
|
258 |
+
"Cover Letter\n",
|
259 |
+
"\n",
|
260 |
+
"\n",
|
261 |
+
"Attach\n",
|
262 |
+
"Paste\n",
|
263 |
+
"\n",
|
264 |
+
"×\n",
|
265 |
+
"\n",
|
266 |
+
"\n",
|
267 |
+
"\n",
|
268 |
+
"\n",
|
269 |
+
"\n",
|
270 |
+
"Submit \n",
|
271 |
+
"\n",
|
272 |
+
"\n",
|
273 |
+
"\n",
|
274 |
+
"\n",
|
275 |
+
"\n",
|
276 |
+
"\n",
|
277 |
+
"\n",
|
278 |
+
"\n",
|
279 |
+
"\n",
|
280 |
+
"\n",
|
281 |
+
"\n",
|
282 |
+
"\n",
|
283 |
+
"We got your Appliaction, our team will get back to you soon.\n",
|
284 |
+
"\n",
|
285 |
+
"\n",
|
286 |
+
"\n",
|
287 |
+
"\n",
|
288 |
+
"\n",
|
289 |
+
"\n",
|
290 |
+
"\n",
|
291 |
+
"\n",
|
292 |
+
"\n",
|
293 |
+
"\n",
|
294 |
+
"\n",
|
295 |
+
"Looks like the application has not uploaded, Please try agin.\n",
|
296 |
+
"\n",
|
297 |
+
"\n",
|
298 |
+
"\n",
|
299 |
+
"\n",
|
300 |
+
"\n",
|
301 |
+
"\n",
|
302 |
+
"\n",
|
303 |
+
"\n",
|
304 |
+
"\n",
|
305 |
+
"\n",
|
306 |
+
"\n",
|
307 |
+
"\n",
|
308 |
+
"\n",
|
309 |
+
"\n",
|
310 |
+
"\n",
|
311 |
+
"Bengaluru (HQ)\n",
|
312 |
+
"\n",
|
313 |
+
"gurgaon\n",
|
314 |
+
"\n",
|
315 |
+
"Mumbai\n",
|
316 |
+
"\n",
|
317 |
+
"\n",
|
318 |
+
"\n",
|
319 |
+
"\n",
|
320 |
+
"\n",
|
321 |
+
"\n",
|
322 |
+
"contact\n",
|
323 |
+
"Shop\n",
|
324 |
+
"Careers\n",
|
325 |
+
"Privacy Policy\n",
|
326 |
+
"Terms & Conditions\n",
|
327 |
+
"\n",
|
328 |
+
"\n",
|
329 |
+
"Myntra is proud to be an Equal Opportunity Employer\n",
|
330 |
+
"\n",
|
331 |
+
"\n",
|
332 |
+
"© 2019 www.myntra.com. All rights reserved.\n",
|
333 |
+
"\n",
|
334 |
+
"\n",
|
335 |
+
"\n",
|
336 |
+
"\n",
|
337 |
+
"\n",
|
338 |
+
"\n",
|
339 |
+
"\n",
|
340 |
+
"\n",
|
341 |
+
"\n",
|
342 |
+
"\n",
|
343 |
+
"\n",
|
344 |
+
"\n",
|
345 |
+
"\n",
|
346 |
+
"\n",
|
347 |
+
"\n",
|
348 |
+
"\n",
|
349 |
+
"\n",
|
350 |
+
"\n",
|
351 |
+
"\n",
|
352 |
+
"\n",
|
353 |
+
"\n",
|
354 |
+
"\n",
|
355 |
+
"\n",
|
356 |
+
"\n"
|
357 |
+
]
|
358 |
+
}
|
359 |
+
],
|
360 |
+
"source": [
|
361 |
+
"# WebBaseLoader will accept the url and extract the data from that, ie web scraping\n",
|
362 |
+
"\n",
|
363 |
+
"from langchain_community.document_loaders import WebBaseLoader\n",
|
364 |
+
"\n",
|
365 |
+
"loader = WebBaseLoader(\"https://careers.myntra.com/job-detail/?id=7431200002\")\n",
|
366 |
+
"page_data = loader.load().pop().page_content\n",
|
367 |
+
"print(page_data)"
|
368 |
+
]
|
369 |
+
},
|
370 |
+
{
|
371 |
+
"cell_type": "code",
|
372 |
+
"execution_count": 5,
|
373 |
+
"id": "85c89a57",
|
374 |
+
"metadata": {},
|
375 |
+
"outputs": [],
|
376 |
+
"source": [
|
377 |
+
"from langchain_core.prompts import PromptTemplate\n",
|
378 |
+
"# (NO PREAMBLE) means dont give that initial text like Here is your response.\n",
|
379 |
+
"prompt_extract = PromptTemplate.from_template(\n",
|
380 |
+
" \"\"\"\n",
|
381 |
+
" ### SCRAPED TEXT FROM WEBSITE:\n",
|
382 |
+
" {page_data}\n",
|
383 |
+
" ### INSTRUCTION:\n",
|
384 |
+
" The scraped text is from the career's page of a website.\n",
|
385 |
+
" Your job is to extract the job postings and return them in JSON format containing the \n",
|
386 |
+
" following keys: `role`, `experience`, `skills` and `description`.\n",
|
387 |
+
" Only return the valid JSON.\n",
|
388 |
+
" ### VALID JSON (NO PREAMBLE): \n",
|
389 |
+
" \"\"\"\n",
|
390 |
+
")"
|
391 |
+
]
|
392 |
+
},
|
393 |
+
{
|
394 |
+
"cell_type": "code",
|
395 |
+
"execution_count": 6,
|
396 |
+
"id": "5267bb13-3402-4f91-9899-77c8b9e08e48",
|
397 |
+
"metadata": {},
|
398 |
+
"outputs": [
|
399 |
+
{
|
400 |
+
"name": "stdout",
|
401 |
+
"output_type": "stream",
|
402 |
+
"text": [
|
403 |
+
"[\n",
|
404 |
+
" {\n",
|
405 |
+
" \"role\": \"Data Scientist\",\n",
|
406 |
+
" \"experience\": \"1+ years of relevant industry experience with a Bachelor’s degree or Master’s/PhD in Computer Science, Mathematics, Statistics/related fields\",\n",
|
407 |
+
" \"skills\": [\n",
|
408 |
+
" \"Python or one other high-level programming language\",\n",
|
409 |
+
" \"Theoretical understanding of statistical models such as regression, clustering and ML algorithms such as decision trees, neural networks, etc.\",\n",
|
410 |
+
" \"Machine learning frameworks like TensorFlow, PyTorch, or scikit-learn\",\n",
|
411 |
+
" \"SQL and/or NoSQL databases\"\n",
|
412 |
+
" ],\n",
|
413 |
+
" \"description\": \"Design, develop and deploy machine learning models, algorithms and systems to solve complex business problems for Myntra Recsys, Search, Vision, SCM, Pricing, Forecasting, Trend and Virality prediction, Gen AI and other areas. Theoretical understanding and practise of machine learning and expertise in one or more of the topics, such as, NLP, Computer Vision, recommender systems and Optimisation.\"\n",
|
414 |
+
" }\n",
|
415 |
+
"]\n"
|
416 |
+
]
|
417 |
+
}
|
418 |
+
],
|
419 |
+
"source": [
|
420 |
+
"chain_extract = prompt_extract | llm # this will form a langchain chain ie you are getting a prompt and passing it to LLM \n",
|
421 |
+
"res = chain_extract.invoke(input={'page_data':page_data})\n",
|
422 |
+
"print(res.content)\n",
|
423 |
+
"\n",
|
424 |
+
"# we got the json format of the job description"
|
425 |
+
]
|
426 |
+
},
|
427 |
+
{
|
428 |
+
"cell_type": "code",
|
429 |
+
"execution_count": 7,
|
430 |
+
"id": "c0213559-8127-4ce4-90b9-8ad913fa5b69",
|
431 |
+
"metadata": {},
|
432 |
+
"outputs": [
|
433 |
+
{
|
434 |
+
"data": {
|
435 |
+
"text/plain": [
|
436 |
+
"str"
|
437 |
+
]
|
438 |
+
},
|
439 |
+
"execution_count": 7,
|
440 |
+
"metadata": {},
|
441 |
+
"output_type": "execute_result"
|
442 |
+
}
|
443 |
+
],
|
444 |
+
"source": [
|
445 |
+
"# but the type of it is string, we want json object so we will use JSON Parser\n",
|
446 |
+
"type(res.content)"
|
447 |
+
]
|
448 |
+
},
|
449 |
+
{
|
450 |
+
"cell_type": "code",
|
451 |
+
"execution_count": 8,
|
452 |
+
"id": "5415fd54",
|
453 |
+
"metadata": {},
|
454 |
+
"outputs": [
|
455 |
+
{
|
456 |
+
"data": {
|
457 |
+
"text/plain": [
|
458 |
+
"[{'role': 'Data Scientist',\n",
|
459 |
+
" 'experience': '1+ years of relevant industry experience with a Bachelor’s degree or Master’s/PhD in Computer Science, Mathematics, Statistics/related fields',\n",
|
460 |
+
" 'skills': ['Python or one other high-level programming language',\n",
|
461 |
+
" 'Theoretical understanding of statistical models such as regression, clustering and ML algorithms such as decision trees, neural networks, etc.',\n",
|
462 |
+
" 'Machine learning frameworks like TensorFlow, PyTorch, or scikit-learn',\n",
|
463 |
+
" 'SQL and/or NoSQL databases'],\n",
|
464 |
+
" 'description': 'Design, develop and deploy machine learning models, algorithms and systems to solve complex business problems for Myntra Recsys, Search, Vision, SCM, Pricing, Forecasting, Trend and Virality prediction, Gen AI and other areas. Theoretical understanding and practise of machine learning and expertise in one or more of the topics, such as, NLP, Computer Vision, recommender systems and Optimisation.'}]"
|
465 |
+
]
|
466 |
+
},
|
467 |
+
"execution_count": 8,
|
468 |
+
"metadata": {},
|
469 |
+
"output_type": "execute_result"
|
470 |
+
}
|
471 |
+
],
|
472 |
+
"source": [
|
473 |
+
"from langchain_core.output_parsers import JsonOutputParser\n",
|
474 |
+
"\n",
|
475 |
+
"json_parser = JsonOutputParser()\n",
|
476 |
+
"json_res = json_parser.parse(res.content)\n",
|
477 |
+
"json_res"
|
478 |
+
]
|
479 |
+
},
|
480 |
+
{
|
481 |
+
"cell_type": "code",
|
482 |
+
"execution_count": 9,
|
483 |
+
"id": "c4226c86-9f8c-4206-9706-c4d93724a584",
|
484 |
+
"metadata": {},
|
485 |
+
"outputs": [
|
486 |
+
{
|
487 |
+
"data": {
|
488 |
+
"text/plain": [
|
489 |
+
"1"
|
490 |
+
]
|
491 |
+
},
|
492 |
+
"execution_count": 9,
|
493 |
+
"metadata": {},
|
494 |
+
"output_type": "execute_result"
|
495 |
+
}
|
496 |
+
],
|
497 |
+
"source": [
|
498 |
+
"len(json_res)"
|
499 |
+
]
|
500 |
+
},
|
501 |
+
{
|
502 |
+
"cell_type": "code",
|
503 |
+
"execution_count": 10,
|
504 |
+
"id": "39961ed6",
|
505 |
+
"metadata": {},
|
506 |
+
"outputs": [
|
507 |
+
{
|
508 |
+
"data": {
|
509 |
+
"text/plain": [
|
510 |
+
"list"
|
511 |
+
]
|
512 |
+
},
|
513 |
+
"execution_count": 10,
|
514 |
+
"metadata": {},
|
515 |
+
"output_type": "execute_result"
|
516 |
+
}
|
517 |
+
],
|
518 |
+
"source": [
|
519 |
+
"type(json_res)\n",
|
520 |
+
"# but we want a dictionary"
|
521 |
+
]
|
522 |
+
},
|
523 |
+
{
|
524 |
+
"cell_type": "code",
|
525 |
+
"execution_count": 11,
|
526 |
+
"id": "eb173c02-93d5-4cff-8763-483834fc7c5c",
|
527 |
+
"metadata": {},
|
528 |
+
"outputs": [],
|
529 |
+
"source": [
|
530 |
+
"# Check if the result is a list and extract the first dictionary\n",
|
531 |
+
"if isinstance(json_res, list):\n",
|
532 |
+
" json_res = json_res[0]"
|
533 |
+
]
|
534 |
+
},
|
535 |
+
{
|
536 |
+
"cell_type": "code",
|
537 |
+
"execution_count": 12,
|
538 |
+
"id": "0614b58c-7ac4-48ad-a20a-69180d759b93",
|
539 |
+
"metadata": {},
|
540 |
+
"outputs": [
|
541 |
+
{
|
542 |
+
"data": {
|
543 |
+
"text/plain": [
|
544 |
+
"{'role': 'Data Scientist',\n",
|
545 |
+
" 'experience': '1+ years of relevant industry experience with a Bachelor’s degree or Master’s/PhD in Computer Science, Mathematics, Statistics/related fields',\n",
|
546 |
+
" 'skills': ['Python or one other high-level programming language',\n",
|
547 |
+
" 'Theoretical understanding of statistical models such as regression, clustering and ML algorithms such as decision trees, neural networks, etc.',\n",
|
548 |
+
" 'Machine learning frameworks like TensorFlow, PyTorch, or scikit-learn',\n",
|
549 |
+
" 'SQL and/or NoSQL databases'],\n",
|
550 |
+
" 'description': 'Design, develop and deploy machine learning models, algorithms and systems to solve complex business problems for Myntra Recsys, Search, Vision, SCM, Pricing, Forecasting, Trend and Virality prediction, Gen AI and other areas. Theoretical understanding and practise of machine learning and expertise in one or more of the topics, such as, NLP, Computer Vision, recommender systems and Optimisation.'}"
|
551 |
+
]
|
552 |
+
},
|
553 |
+
"execution_count": 12,
|
554 |
+
"metadata": {},
|
555 |
+
"output_type": "execute_result"
|
556 |
+
}
|
557 |
+
],
|
558 |
+
"source": [
|
559 |
+
"json_res"
|
560 |
+
]
|
561 |
+
},
|
562 |
+
{
|
563 |
+
"cell_type": "code",
|
564 |
+
"execution_count": 13,
|
565 |
+
"id": "62c524d8-3e3a-4922-af5b-4874307298f0",
|
566 |
+
"metadata": {},
|
567 |
+
"outputs": [],
|
568 |
+
"source": [
|
569 |
+
"# now its a dicitionary"
|
570 |
+
]
|
571 |
+
},
|
572 |
+
{
|
573 |
+
"cell_type": "code",
|
574 |
+
"execution_count": 14,
|
575 |
+
"id": "1e8a0f74",
|
576 |
+
"metadata": {},
|
577 |
+
"outputs": [
|
578 |
+
{
|
579 |
+
"data": {
|
580 |
+
"text/html": [
|
581 |
+
"<div>\n",
|
582 |
+
"<style scoped>\n",
|
583 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
584 |
+
" vertical-align: middle;\n",
|
585 |
+
" }\n",
|
586 |
+
"\n",
|
587 |
+
" .dataframe tbody tr th {\n",
|
588 |
+
" vertical-align: top;\n",
|
589 |
+
" }\n",
|
590 |
+
"\n",
|
591 |
+
" .dataframe thead th {\n",
|
592 |
+
" text-align: right;\n",
|
593 |
+
" }\n",
|
594 |
+
"</style>\n",
|
595 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
596 |
+
" <thead>\n",
|
597 |
+
" <tr style=\"text-align: right;\">\n",
|
598 |
+
" <th></th>\n",
|
599 |
+
" <th>Techstack</th>\n",
|
600 |
+
" <th>Links</th>\n",
|
601 |
+
" </tr>\n",
|
602 |
+
" </thead>\n",
|
603 |
+
" <tbody>\n",
|
604 |
+
" <tr>\n",
|
605 |
+
" <th>0</th>\n",
|
606 |
+
" <td>Machine Learning, ML, Python</td>\n",
|
607 |
+
" <td>https://github.com/MandarBhalerao/Gurgaon-Real...</td>\n",
|
608 |
+
" </tr>\n",
|
609 |
+
" <tr>\n",
|
610 |
+
" <th>1</th>\n",
|
611 |
+
" <td>Recommendation System, Python</td>\n",
|
612 |
+
" <td>https://github.com/MandarBhalerao/Movie-Recomm...</td>\n",
|
613 |
+
" </tr>\n",
|
614 |
+
" <tr>\n",
|
615 |
+
" <th>2</th>\n",
|
616 |
+
" <td>C++, CUDA</td>\n",
|
617 |
+
" <td>https://github.com/MandarBhalerao/Dilated-Conv...</td>\n",
|
618 |
+
" </tr>\n",
|
619 |
+
" <tr>\n",
|
620 |
+
" <th>3</th>\n",
|
621 |
+
" <td>React, Node.js, MongoDB</td>\n",
|
622 |
+
" <td>https://example.com/react-portfolio</td>\n",
|
623 |
+
" </tr>\n",
|
624 |
+
" <tr>\n",
|
625 |
+
" <th>4</th>\n",
|
626 |
+
" <td>Angular,.NET, SQL Server</td>\n",
|
627 |
+
" <td>https://example.com/angular-portfolio</td>\n",
|
628 |
+
" </tr>\n",
|
629 |
+
" <tr>\n",
|
630 |
+
" <th>5</th>\n",
|
631 |
+
" <td>Vue.js, Ruby on Rails, PostgreSQL</td>\n",
|
632 |
+
" <td>https://example.com/vue-portfolio</td>\n",
|
633 |
+
" </tr>\n",
|
634 |
+
" <tr>\n",
|
635 |
+
" <th>6</th>\n",
|
636 |
+
" <td>Java, Spring Boot, Oracle</td>\n",
|
637 |
+
" <td>https://example.com/java-portfolio</td>\n",
|
638 |
+
" </tr>\n",
|
639 |
+
" <tr>\n",
|
640 |
+
" <th>7</th>\n",
|
641 |
+
" <td>Flutter, Firebase, GraphQL</td>\n",
|
642 |
+
" <td>https://example.com/flutter-portfolio</td>\n",
|
643 |
+
" </tr>\n",
|
644 |
+
" <tr>\n",
|
645 |
+
" <th>8</th>\n",
|
646 |
+
" <td>WordPress, PHP, MySQL</td>\n",
|
647 |
+
" <td>https://example.com/wordpress-portfolio</td>\n",
|
648 |
+
" </tr>\n",
|
649 |
+
" <tr>\n",
|
650 |
+
" <th>9</th>\n",
|
651 |
+
" <td>Magento, PHP, MySQL</td>\n",
|
652 |
+
" <td>https://example.com/magento-portfolio</td>\n",
|
653 |
+
" </tr>\n",
|
654 |
+
" <tr>\n",
|
655 |
+
" <th>10</th>\n",
|
656 |
+
" <td>React Native, Node.js, MongoDB</td>\n",
|
657 |
+
" <td>https://example.com/react-native-portfolio</td>\n",
|
658 |
+
" </tr>\n",
|
659 |
+
" <tr>\n",
|
660 |
+
" <th>11</th>\n",
|
661 |
+
" <td>iOS, Swift, Core Data</td>\n",
|
662 |
+
" <td>https://example.com/ios-portfolio</td>\n",
|
663 |
+
" </tr>\n",
|
664 |
+
" <tr>\n",
|
665 |
+
" <th>12</th>\n",
|
666 |
+
" <td>Android, Java, Room Persistence</td>\n",
|
667 |
+
" <td>https://example.com/android-portfolio</td>\n",
|
668 |
+
" </tr>\n",
|
669 |
+
" <tr>\n",
|
670 |
+
" <th>13</th>\n",
|
671 |
+
" <td>Kotlin, Android, Firebase</td>\n",
|
672 |
+
" <td>https://example.com/kotlin-android-portfolio</td>\n",
|
673 |
+
" </tr>\n",
|
674 |
+
" <tr>\n",
|
675 |
+
" <th>14</th>\n",
|
676 |
+
" <td>Android TV, Kotlin, Android NDK</td>\n",
|
677 |
+
" <td>https://example.com/android-tv-portfolio</td>\n",
|
678 |
+
" </tr>\n",
|
679 |
+
" <tr>\n",
|
680 |
+
" <th>15</th>\n",
|
681 |
+
" <td>iOS, Swift, ARKit</td>\n",
|
682 |
+
" <td>https://example.com/ios-ar-portfolio</td>\n",
|
683 |
+
" </tr>\n",
|
684 |
+
" <tr>\n",
|
685 |
+
" <th>16</th>\n",
|
686 |
+
" <td>Cross-platform, Xamarin, Azure</td>\n",
|
687 |
+
" <td>https://example.com/xamarin-portfolio</td>\n",
|
688 |
+
" </tr>\n",
|
689 |
+
" <tr>\n",
|
690 |
+
" <th>17</th>\n",
|
691 |
+
" <td>Backend, Kotlin, Spring Boot</td>\n",
|
692 |
+
" <td>https://example.com/kotlin-backend-portfolio</td>\n",
|
693 |
+
" </tr>\n",
|
694 |
+
" <tr>\n",
|
695 |
+
" <th>18</th>\n",
|
696 |
+
" <td>Frontend, TypeScript, Angular</td>\n",
|
697 |
+
" <td>https://example.com/typescript-frontend-portfolio</td>\n",
|
698 |
+
" </tr>\n",
|
699 |
+
" <tr>\n",
|
700 |
+
" <th>19</th>\n",
|
701 |
+
" <td>Full-stack, JavaScript, Express.js</td>\n",
|
702 |
+
" <td>https://example.com/full-stack-js-portfolio</td>\n",
|
703 |
+
" </tr>\n",
|
704 |
+
" <tr>\n",
|
705 |
+
" <th>20</th>\n",
|
706 |
+
" <td>DevOps, Jenkins, Docker</td>\n",
|
707 |
+
" <td>https://example.com/devops-portfolio</td>\n",
|
708 |
+
" </tr>\n",
|
709 |
+
" </tbody>\n",
|
710 |
+
"</table>\n",
|
711 |
+
"</div>"
|
712 |
+
],
|
713 |
+
"text/plain": [
|
714 |
+
" Techstack \\\n",
|
715 |
+
"0 Machine Learning, ML, Python \n",
|
716 |
+
"1 Recommendation System, Python \n",
|
717 |
+
"2 C++, CUDA \n",
|
718 |
+
"3 React, Node.js, MongoDB \n",
|
719 |
+
"4 Angular,.NET, SQL Server \n",
|
720 |
+
"5 Vue.js, Ruby on Rails, PostgreSQL \n",
|
721 |
+
"6 Java, Spring Boot, Oracle \n",
|
722 |
+
"7 Flutter, Firebase, GraphQL \n",
|
723 |
+
"8 WordPress, PHP, MySQL \n",
|
724 |
+
"9 Magento, PHP, MySQL \n",
|
725 |
+
"10 React Native, Node.js, MongoDB \n",
|
726 |
+
"11 iOS, Swift, Core Data \n",
|
727 |
+
"12 Android, Java, Room Persistence \n",
|
728 |
+
"13 Kotlin, Android, Firebase \n",
|
729 |
+
"14 Android TV, Kotlin, Android NDK \n",
|
730 |
+
"15 iOS, Swift, ARKit \n",
|
731 |
+
"16 Cross-platform, Xamarin, Azure \n",
|
732 |
+
"17 Backend, Kotlin, Spring Boot \n",
|
733 |
+
"18 Frontend, TypeScript, Angular \n",
|
734 |
+
"19 Full-stack, JavaScript, Express.js \n",
|
735 |
+
"20 DevOps, Jenkins, Docker \n",
|
736 |
+
"\n",
|
737 |
+
" Links \n",
|
738 |
+
"0 https://github.com/MandarBhalerao/Gurgaon-Real... \n",
|
739 |
+
"1 https://github.com/MandarBhalerao/Movie-Recomm... \n",
|
740 |
+
"2 https://github.com/MandarBhalerao/Dilated-Conv... \n",
|
741 |
+
"3 https://example.com/react-portfolio \n",
|
742 |
+
"4 https://example.com/angular-portfolio \n",
|
743 |
+
"5 https://example.com/vue-portfolio \n",
|
744 |
+
"6 https://example.com/java-portfolio \n",
|
745 |
+
"7 https://example.com/flutter-portfolio \n",
|
746 |
+
"8 https://example.com/wordpress-portfolio \n",
|
747 |
+
"9 https://example.com/magento-portfolio \n",
|
748 |
+
"10 https://example.com/react-native-portfolio \n",
|
749 |
+
"11 https://example.com/ios-portfolio \n",
|
750 |
+
"12 https://example.com/android-portfolio \n",
|
751 |
+
"13 https://example.com/kotlin-android-portfolio \n",
|
752 |
+
"14 https://example.com/android-tv-portfolio \n",
|
753 |
+
"15 https://example.com/ios-ar-portfolio \n",
|
754 |
+
"16 https://example.com/xamarin-portfolio \n",
|
755 |
+
"17 https://example.com/kotlin-backend-portfolio \n",
|
756 |
+
"18 https://example.com/typescript-frontend-portfolio \n",
|
757 |
+
"19 https://example.com/full-stack-js-portfolio \n",
|
758 |
+
"20 https://example.com/devops-portfolio "
|
759 |
+
]
|
760 |
+
},
|
761 |
+
"execution_count": 14,
|
762 |
+
"metadata": {},
|
763 |
+
"output_type": "execute_result"
|
764 |
+
}
|
765 |
+
],
|
766 |
+
"source": [
|
767 |
+
"# so whenever there is a job posting, we will extract this skills from the job \n",
|
768 |
+
"# and we will match it with one or multiple of these technologies mentioned in the csv file and it will retrive those portfolio urls\n",
|
769 |
+
"# which we will use while writing an email\n",
|
770 |
+
"\n",
|
771 |
+
"import pandas as pd\n",
|
772 |
+
"\n",
|
773 |
+
"df = pd.read_csv(\"my_portfolio.csv\")\n",
|
774 |
+
"df"
|
775 |
+
]
|
776 |
+
},
|
777 |
+
{
|
778 |
+
"cell_type": "code",
|
779 |
+
"execution_count": 15,
|
780 |
+
"id": "f7e888d4",
|
781 |
+
"metadata": {},
|
782 |
+
"outputs": [],
|
783 |
+
"source": [
|
784 |
+
"import uuid\n",
|
785 |
+
"import chromadb\n",
|
786 |
+
"\n",
|
787 |
+
"# when you use Client, it will create a chromadb in memory\n",
|
788 |
+
"# but when we use PersistentClient it will create a chromadb on a disk ie it will be stored in our current folder so that we can retrive it anytime\n",
|
789 |
+
"client = chromadb.PersistentClient('vectorstore')\n",
|
790 |
+
"collection = client.get_or_create_collection(name=\"portfolio\")\n",
|
791 |
+
"\n",
|
792 |
+
"\n",
|
793 |
+
"if not collection.count(): # this means if collection does not have any count ie if it is being created for the first time\n",
|
794 |
+
" for _, row in df.iterrows(): # then you iterate through all your dataframe rows, and for each row, you are adding a document\n",
|
795 |
+
" collection.add(documents=row[\"Techstack\"],\n",
|
796 |
+
" metadatas={\"links\": row[\"Links\"]},\n",
|
797 |
+
" ids=[str(uuid.uuid4())])"
|
798 |
+
]
|
799 |
+
},
|
800 |
+
{
|
801 |
+
"cell_type": "code",
|
802 |
+
"execution_count": 16,
|
803 |
+
"id": "32d50152-4ddb-49e2-9143-c589a82fd137",
|
804 |
+
"metadata": {},
|
805 |
+
"outputs": [],
|
806 |
+
"source": [
|
807 |
+
"# a folder named vectorstore will be created and data will be stored there"
|
808 |
+
]
|
809 |
+
},
|
810 |
+
{
|
811 |
+
"cell_type": "code",
|
812 |
+
"execution_count": 17,
|
813 |
+
"id": "690b55b3-c5dc-4f83-93d0-a6aa2d34cff4",
|
814 |
+
"metadata": {},
|
815 |
+
"outputs": [],
|
816 |
+
"source": [
|
817 |
+
"job = json_res"
|
818 |
+
]
|
819 |
+
},
|
820 |
+
{
|
821 |
+
"cell_type": "code",
|
822 |
+
"execution_count": 18,
|
823 |
+
"id": "39ad2fa2",
|
824 |
+
"metadata": {},
|
825 |
+
"outputs": [
|
826 |
+
{
|
827 |
+
"data": {
|
828 |
+
"text/plain": [
|
829 |
+
"[[{'links': 'https://example.com/ml-python-portfolio'},\n",
|
830 |
+
" {'links': 'https://example.com/python-portfolio'}],\n",
|
831 |
+
" [{'links': 'https://example.com/ml-python-portfolio'},\n",
|
832 |
+
" {'links': 'https://example.com/python-portfolio'}],\n",
|
833 |
+
" [{'links': 'https://example.com/ml-python-portfolio'},\n",
|
834 |
+
" {'links': 'https://example.com/ios-ar-portfolio'}],\n",
|
835 |
+
" [{'links': 'https://example.com/magento-portfolio'},\n",
|
836 |
+
" {'links': 'https://example.com/angular-portfolio'}]]"
|
837 |
+
]
|
838 |
+
},
|
839 |
+
"execution_count": 18,
|
840 |
+
"metadata": {},
|
841 |
+
"output_type": "execute_result"
|
842 |
+
}
|
843 |
+
],
|
844 |
+
"source": [
|
845 |
+
"# just making a query and checking\n",
|
846 |
+
"\n",
|
847 |
+
"links = collection.query(query_texts=job['skills'], n_results=2).get('metadatas', [])\n",
|
848 |
+
"links"
|
849 |
+
]
|
850 |
+
},
|
851 |
+
{
|
852 |
+
"cell_type": "code",
|
853 |
+
"execution_count": 19,
|
854 |
+
"id": "8bd36844",
|
855 |
+
"metadata": {},
|
856 |
+
"outputs": [
|
857 |
+
{
|
858 |
+
"data": {
|
859 |
+
"text/plain": [
|
860 |
+
"{'role': 'Data Scientist',\n",
|
861 |
+
" 'experience': '1+ years of relevant industry experience with a Bachelor’s degree or Master’s/PhD in Computer Science, Mathematics, Statistics/related fields',\n",
|
862 |
+
" 'skills': ['Python or one other high-level programming language',\n",
|
863 |
+
" 'Theoretical understanding of statistical models such as regression, clustering and ML algorithms such as decision trees, neural networks, etc.',\n",
|
864 |
+
" 'Machine learning frameworks like TensorFlow, PyTorch, or scikit-learn',\n",
|
865 |
+
" 'SQL and/or NoSQL databases'],\n",
|
866 |
+
" 'description': 'Design, develop and deploy machine learning models, algorithms and systems to solve complex business problems for Myntra Recsys, Search, Vision, SCM, Pricing, Forecasting, Trend and Virality prediction, Gen AI and other areas. Theoretical understanding and practise of machine learning and expertise in one or more of the topics, such as, NLP, Computer Vision, recommender systems and Optimisation.'}"
|
867 |
+
]
|
868 |
+
},
|
869 |
+
"execution_count": 19,
|
870 |
+
"metadata": {},
|
871 |
+
"output_type": "execute_result"
|
872 |
+
}
|
873 |
+
],
|
874 |
+
"source": [
|
875 |
+
"job"
|
876 |
+
]
|
877 |
+
},
|
878 |
+
{
|
879 |
+
"cell_type": "code",
|
880 |
+
"execution_count": 20,
|
881 |
+
"id": "1ccfd720",
|
882 |
+
"metadata": {},
|
883 |
+
"outputs": [
|
884 |
+
{
|
885 |
+
"data": {
|
886 |
+
"text/plain": [
|
887 |
+
"['Python or one other high-level programming language',\n",
|
888 |
+
" 'Theoretical understanding of statistical models such as regression, clustering and ML algorithms such as decision trees, neural networks, etc.',\n",
|
889 |
+
" 'Machine learning frameworks like TensorFlow, PyTorch, or scikit-learn',\n",
|
890 |
+
" 'SQL and/or NoSQL databases']"
|
891 |
+
]
|
892 |
+
},
|
893 |
+
"execution_count": 20,
|
894 |
+
"metadata": {},
|
895 |
+
"output_type": "execute_result"
|
896 |
+
}
|
897 |
+
],
|
898 |
+
"source": [
|
899 |
+
"job['skills']"
|
900 |
+
]
|
901 |
+
},
|
902 |
+
{
|
903 |
+
"cell_type": "code",
|
904 |
+
"execution_count": 21,
|
905 |
+
"id": "64a97dd2",
|
906 |
+
"metadata": {},
|
907 |
+
"outputs": [],
|
908 |
+
"source": [
|
909 |
+
"# this is prompt template for writing an email\n",
|
910 |
+
"\n",
|
911 |
+
"prompt_email = PromptTemplate.from_template(\n",
|
912 |
+
" \"\"\"\n",
|
913 |
+
" ### JOB DESCRIPTION:\n",
|
914 |
+
" {job_description}\n",
|
915 |
+
"\n",
|
916 |
+
" ### INSTRUCTION:\n",
|
917 |
+
" You are Mandar Bhalerao, an MTech student at the Indian Institute of Science, Bangalore, focusing on Computer Science and Automation. Your academic journey is complemented by hands-on internships where you've applied cutting-edge machine learning and deep learning techniques to real-world problems.\n",
|
918 |
+
"\n",
|
919 |
+
" Your task is to write a cold email to the hiring manager detailing your experiences and projects that highlight your expertise in AI and machine learning. Start with introducing yourself using the above details and then discuss your role in enhancing the performance of Stable Diffusion models by using Knowledge Distillation Techniques at NeuroPixel.AI , achieving a 30 percent reduction in inference steps. Elaborate on your project, \"Gurgaon Real Estate Price Prediction,\" where you implemented advanced machine learning models to achieve an R² score of 0.90 and developed a dual-layer recommendation system.\n",
|
920 |
+
"\n",
|
921 |
+
" Also, include your experience at Western Union, where you used Quantum Metric to improve user experience design, increasing conversion rates by 10%. Provide insights into your technical skills, particularly in Python and C++, and how these have supported your project implementations.\n",
|
922 |
+
"\n",
|
923 |
+
" Remember, you are Mandar, with a strong foundation in theoretical knowledge and practical application of machine learning, deep learning and AI technologies. Discuss the methodologies you employed, the challenges you overcame, and the real-world impact of your projects.\n",
|
924 |
+
" \n",
|
925 |
+
" Your job is to write a cold email to the hiring manager regarding the job mentioned above describing the capability of you \n",
|
926 |
+
" in fulfilling their needs.\n",
|
927 |
+
" \n",
|
928 |
+
" Also add the most relevant ones from the following links to showcase Mandar's work in these domains: {link_list}\n",
|
929 |
+
" Remember you are Mandar Bhalerao, an MTech student at the Indian Institute of Science, Bangalore.\n",
|
930 |
+
" End the email with Mandar Bhalerao, (new line) MTech in Computer Science and Automation, (new line) IISc Bangalore. \n",
|
931 |
+
" Do not provide a preamble.\n",
|
932 |
+
" ### EMAIL (NO PREAMBLE):\n",
|
933 |
+
"\n",
|
934 |
+
" \"\"\"\n",
|
935 |
+
" )\n",
|
936 |
+
"\n",
|
937 |
+
"# the things inside curly brackets like {link_list}, then this is something we will give as an argument to a prompt template"
|
938 |
+
]
|
939 |
+
},
|
940 |
+
{
|
941 |
+
"cell_type": "code",
|
942 |
+
"execution_count": 22,
|
943 |
+
"id": "573c8103-f196-42a7-88f8-3a367d28b4c3",
|
944 |
+
"metadata": {},
|
945 |
+
"outputs": [
|
946 |
+
{
|
947 |
+
"name": "stdout",
|
948 |
+
"output_type": "stream",
|
949 |
+
"text": [
|
950 |
+
"Subject: Application for Data Scientist Role at Myntra\n",
|
951 |
+
"\n",
|
952 |
+
"Dear Hiring Manager,\n",
|
953 |
+
"\n",
|
954 |
+
"I am Mandar Bhalerao, an MTech student at the Indian Institute of Science, Bangalore, with a strong foundation in Computer Science and Automation. I am excited to apply for the Data Scientist role at Myntra, where I can leverage my expertise in machine learning and AI to drive business growth.\n",
|
955 |
+
"\n",
|
956 |
+
"As a hands-on practitioner with a solid theoretical understanding of statistical models and machine learning algorithms, I am confident in my ability to design, develop, and deploy models that solve complex business problems. My experience in applying cutting-edge techniques to real-world problems has equipped me with the skills to tackle challenges in areas such as NLP, Computer Vision, recommender systems, and optimization.\n",
|
957 |
+
"\n",
|
958 |
+
"One of my notable projects was at NeuroPixel.AI, where I worked on enhancing the performance of Stable Diffusion models using Knowledge Distillation Techniques. By employing this approach, I achieved a 30% reduction in inference steps, significantly improving the model's efficiency. This project showcased my ability to apply theoretical knowledge to practical problems and drive tangible results.\n",
|
959 |
+
"\n",
|
960 |
+
"Another project that highlights my expertise is the \"Gurgaon Real Estate Price Prediction\" model, where I implemented advanced machine learning models to achieve an R² score of 0.90. Additionally, I developed a dual-layer recommendation system that demonstrated my ability to design and deploy complex systems. This project showcased my skills in data analysis, model development, and system design.\n",
|
961 |
+
"\n",
|
962 |
+
"In my previous internship at Western Union, I utilized Quantum Metric to improve user experience design, resulting in a 10% increase in conversion rates. This experience demonstrated my ability to apply data-driven insights to drive business outcomes.\n",
|
963 |
+
"\n",
|
964 |
+
"From a technical standpoint, I am proficient in Python and C++, which have been instrumental in supporting my project implementations. My expertise in these programming languages has enabled me to develop and deploy efficient models that drive business results.\n",
|
965 |
+
"\n",
|
966 |
+
"To showcase my work, I would like to share the following relevant links:\n",
|
967 |
+
"\n",
|
968 |
+
"- https://example.com/ml-python-portfolio\n",
|
969 |
+
"- https://example.com/python-portfolio\n",
|
970 |
+
"\n",
|
971 |
+
"These links demonstrate my capabilities in machine learning and Python, which are essential skills for the Data Scientist role at Myntra.\n",
|
972 |
+
"\n",
|
973 |
+
"I am excited about the opportunity to join Myntra and contribute my skills and expertise to drive business growth. Thank you for considering my application.\n",
|
974 |
+
"\n",
|
975 |
+
"Mandar Bhalerao\n",
|
976 |
+
"MTech in Computer Science and Automation\n",
|
977 |
+
"IISc Bangalore\n"
|
978 |
+
]
|
979 |
+
}
|
980 |
+
],
|
981 |
+
"source": [
|
982 |
+
"# again creating a chain of prompt_email and llm\n",
|
983 |
+
"# invoking the chain by passing the parameter of job_description and link_list\n",
|
984 |
+
"\n",
|
985 |
+
"chain_email = prompt_email | llm\n",
|
986 |
+
"res = chain_email.invoke({\"job_description\": str(job), \"link_list\": links})\n",
|
987 |
+
"print(res.content)"
|
988 |
+
]
|
989 |
+
},
|
990 |
+
{
|
991 |
+
"cell_type": "code",
|
992 |
+
"execution_count": null,
|
993 |
+
"id": "26a9536d-5fa6-42cf-884d-78fbd637d412",
|
994 |
+
"metadata": {},
|
995 |
+
"outputs": [],
|
996 |
+
"source": []
|
997 |
+
},
|
998 |
+
{
|
999 |
+
"cell_type": "code",
|
1000 |
+
"execution_count": null,
|
1001 |
+
"id": "b3e94e8e-428a-494b-b37b-2751215426b7",
|
1002 |
+
"metadata": {},
|
1003 |
+
"outputs": [],
|
1004 |
+
"source": []
|
1005 |
+
},
|
1006 |
+
{
|
1007 |
+
"cell_type": "code",
|
1008 |
+
"execution_count": null,
|
1009 |
+
"id": "c319ba1f-6dea-4fe8-98eb-319f4187b0e0",
|
1010 |
+
"metadata": {},
|
1011 |
+
"outputs": [],
|
1012 |
+
"source": []
|
1013 |
+
}
|
1014 |
+
],
|
1015 |
+
"metadata": {
|
1016 |
+
"kernelspec": {
|
1017 |
+
"display_name": "Python 3 (ipykernel)",
|
1018 |
+
"language": "python",
|
1019 |
+
"name": "python3"
|
1020 |
+
},
|
1021 |
+
"language_info": {
|
1022 |
+
"codemirror_mode": {
|
1023 |
+
"name": "ipython",
|
1024 |
+
"version": 3
|
1025 |
+
},
|
1026 |
+
"file_extension": ".py",
|
1027 |
+
"mimetype": "text/x-python",
|
1028 |
+
"name": "python",
|
1029 |
+
"nbconvert_exporter": "python",
|
1030 |
+
"pygments_lexer": "ipython3",
|
1031 |
+
"version": "3.10.7"
|
1032 |
+
}
|
1033 |
+
},
|
1034 |
+
"nbformat": 4,
|
1035 |
+
"nbformat_minor": 5
|
1036 |
+
}
|
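The cells above wire a PromptTemplate into the Groq-hosted Llama model with LangChain's `|` operator and fill the `{job_description}` and `{link_list}` placeholders at invoke time. Below is a minimal, self-contained sketch of that same pattern; the API key, model name and sample inputs are placeholders for illustration, not values taken from this repo.

from langchain_core.prompts import PromptTemplate
from langchain_groq import ChatGroq

# Placeholder credentials and model name -- substitute your own values.
llm = ChatGroq(
    temperature=0,
    groq_api_key="YOUR_GROQ_API_KEY",
    model_name="llama-3.1-70b-versatile",
)

# Anything in curly braces becomes a template variable filled in at invoke time.
prompt_email = PromptTemplate.from_template(
    """
    ### JOB DESCRIPTION:
    {job_description}

    ### INSTRUCTION:
    Write a short cold email for the job above and cite the most relevant
    portfolio links from: {link_list}
    Do not provide a preamble.
    ### EMAIL (NO PREAMBLE):
    """
)

# The | operator composes the template and the LLM into a single runnable chain.
chain_email = prompt_email | llm
res = chain_email.invoke({
    "job_description": "Data Scientist role requiring Python and ML experience",
    "link_list": ["https://example.com/ml-python-portfolio"],
})
print(res.content)  # the generated email text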
experimenting.ipynb
ADDED
@@ -0,0 +1,712 @@
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 66,
|
6 |
+
"id": "4121c69d-771c-4296-b336-402871727af1",
|
7 |
+
"metadata": {},
|
8 |
+
"outputs": [],
|
9 |
+
"source": [
|
10 |
+
"import pdfplumber\n",
|
11 |
+
"import re\n",
|
12 |
+
"from langchain_groq import ChatGroq"
|
13 |
+
]
|
14 |
+
},
|
15 |
+
{
|
16 |
+
"cell_type": "code",
|
17 |
+
"execution_count": 67,
|
18 |
+
"id": "266d57e3-ab44-44d2-a3bc-a1574b5189e7",
|
19 |
+
"metadata": {},
|
20 |
+
"outputs": [],
|
21 |
+
"source": [
|
22 |
+
"llm = ChatGroq(\n",
|
23 |
+
" temperature=0, \n",
|
24 |
+
" groq_api_key='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', \n",
|
25 |
+
" model_name=\"llama-3.1-70b-versatile\"\n",
|
26 |
+
")"
|
27 |
+
]
|
28 |
+
},
|
29 |
+
{
|
30 |
+
"cell_type": "code",
|
31 |
+
"execution_count": 68,
|
32 |
+
"id": "c0967c38-3fab-4588-b5aa-d97e65697a03",
|
33 |
+
"metadata": {},
|
34 |
+
"outputs": [],
|
35 |
+
"source": [
|
36 |
+
"def extract_text_from_pdf(pdf_path):\n",
|
37 |
+
" with pdfplumber.open(pdf_path) as pdf:\n",
|
38 |
+
" pages = [page.extract_text() for page in pdf.pages]\n",
|
39 |
+
" all_text = \"\\n\".join(pages) if pages else \"\"\n",
|
40 |
+
" # print(all_text)\n",
|
41 |
+
" return all_text"
|
42 |
+
]
|
43 |
+
},
|
44 |
+
{
|
45 |
+
"cell_type": "code",
|
46 |
+
"execution_count": 69,
|
47 |
+
"id": "11105d9a-c009-48f0-a6ca-e59eb8378497",
|
48 |
+
"metadata": {},
|
49 |
+
"outputs": [],
|
50 |
+
"source": [
|
51 |
+
"pdf_path = \"C:/Users/Admin/Downloads/Mandar_Bhalerao_IISc.pdf\"\n",
|
52 |
+
"pdf_data = extract_text_from_pdf(pdf_path)\n",
|
53 |
+
"# output_path = process_resume(pdf_path)\n",
|
54 |
+
"# print(f\"Cold email prompt saved at: {output_path}\")"
|
55 |
+
]
|
56 |
+
},
|
57 |
+
{
|
58 |
+
"cell_type": "code",
|
59 |
+
"execution_count": 70,
|
60 |
+
"id": "0ae61bb3-d123-4513-bd5c-cb970c68c2f3",
|
61 |
+
"metadata": {},
|
62 |
+
"outputs": [],
|
63 |
+
"source": [
|
64 |
+
"from langchain_core.prompts import PromptTemplate\n",
|
65 |
+
"# (NO PREAMBLE) means dont give that initial text like Here is your response.\n",
|
66 |
+
"prompt_extract = PromptTemplate.from_template(\n",
|
67 |
+
" \"\"\"\n",
|
68 |
+
" ### PDF DATA OBTAINED FROM RESUME:\n",
|
69 |
+
" {pdf_data}\n",
|
70 |
+
" ### INSTRUCTION:\n",
|
71 |
+
" The data is from the resume of a person.\n",
|
72 |
+
" Your job is to extract all the details of this person and return them in JSON format containing the \n",
|
73 |
+
" following keys: `name`, `education`, `experience`, `projects`,`skills`, and `achievements`.\n",
|
74 |
+
" Only return the valid JSON.\n",
|
75 |
+
" ### VALID JSON (NO PREAMBLE): \n",
|
76 |
+
" \"\"\"\n",
|
77 |
+
")\n",
|
78 |
+
"\n",
|
79 |
+
" # Your job is to extract the job postings and return them in JSON format containing the \n",
|
80 |
+
" # following keys: `role`, `experience`, `skills` and `description`.\n",
|
81 |
+
"\n",
|
82 |
+
"\n",
|
83 |
+
" # \"name\": name,\n",
|
84 |
+
" # \"education\": education,\n",
|
85 |
+
" # \"experience\": experience,\n",
|
86 |
+
" # \"projects\": projects,\n",
|
87 |
+
" # \"skills\": skills,\n",
|
88 |
+
" # \"achievements\": achievements\n",
|
89 |
+
"\n",
|
90 |
+
"\n",
|
91 |
+
"# def generate_cold_email(details):\n",
|
92 |
+
"# return f\"\"\"\n",
|
93 |
+
"# You are {details['name']}, a graduate from {details['education']}. Your professional experience includes {details['experience']}. You have led projects such as {details['projects']} and are skilled in {details['skills']}. You have also achieved {details['achievements']}.\n",
|
94 |
+
"\n",
|
95 |
+
"# Your task is to write a cold email to a potential employer or client, showcasing your skills and experiences detailed above. Mention your hands-on experience with technologies and how you can contribute to solving real-world problems.\n",
|
96 |
+
"\n",
|
97 |
+
"# Remember, you are {details['name']}, ready to make a significant impact in your new role.\n",
|
98 |
+
"# \"\"\""
|
99 |
+
]
|
100 |
+
},
|
101 |
+
{
|
102 |
+
"cell_type": "code",
|
103 |
+
"execution_count": 74,
|
104 |
+
"id": "f533ce30-9101-40a8-9159-3a6a4dd7ea2e",
|
105 |
+
"metadata": {},
|
106 |
+
"outputs": [],
|
107 |
+
"source": [
|
108 |
+
"chain_extract = prompt_extract | llm # this will form a langchain chain ie you are getting a prompt and passing it to LLM \n",
|
109 |
+
"res = chain_extract.invoke(input={'pdf_data':pdf_data})\n",
|
110 |
+
"# print(res.content)\n",
|
111 |
+
"\n",
|
112 |
+
"# we got the json format of the job description"
|
113 |
+
]
|
114 |
+
},
|
115 |
+
{
|
116 |
+
"cell_type": "code",
|
117 |
+
"execution_count": 75,
|
118 |
+
"id": "947778e8-e430-4afc-96ce-2e91765c8bc5",
|
119 |
+
"metadata": {},
|
120 |
+
"outputs": [
|
121 |
+
{
|
122 |
+
"data": {
|
123 |
+
"text/plain": [
|
124 |
+
"str"
|
125 |
+
]
|
126 |
+
},
|
127 |
+
"execution_count": 75,
|
128 |
+
"metadata": {},
|
129 |
+
"output_type": "execute_result"
|
130 |
+
}
|
131 |
+
],
|
132 |
+
"source": [
|
133 |
+
"type(res.content)"
|
134 |
+
]
|
135 |
+
},
|
136 |
+
{
|
137 |
+
"cell_type": "code",
|
138 |
+
"execution_count": 76,
|
139 |
+
"id": "fc69353b-ec60-445c-be0f-af4fc71e0cf8",
|
140 |
+
"metadata": {},
|
141 |
+
"outputs": [
|
142 |
+
{
|
143 |
+
"data": {
|
144 |
+
"text/plain": [
|
145 |
+
"4805"
|
146 |
+
]
|
147 |
+
},
|
148 |
+
"execution_count": 76,
|
149 |
+
"metadata": {},
|
150 |
+
"output_type": "execute_result"
|
151 |
+
}
|
152 |
+
],
|
153 |
+
"source": [
|
154 |
+
"len(res.content)"
|
155 |
+
]
|
156 |
+
},
|
157 |
+
{
|
158 |
+
"cell_type": "code",
|
159 |
+
"execution_count": 77,
|
160 |
+
"id": "d3b88673-4f06-40a2-aad6-c2eea7d1e392",
|
161 |
+
"metadata": {},
|
162 |
+
"outputs": [],
|
163 |
+
"source": [
|
164 |
+
"from langchain_core.output_parsers import JsonOutputParser\n",
|
165 |
+
"\n",
|
166 |
+
"json_parser = JsonOutputParser()\n",
|
167 |
+
"json_res = json_parser.parse(res.content)\n",
|
168 |
+
"# json_res"
|
169 |
+
]
|
170 |
+
},
|
171 |
+
{
|
172 |
+
"cell_type": "code",
|
173 |
+
"execution_count": 78,
|
174 |
+
"id": "a47aa45b-8148-410a-9a0a-420c8ccd771c",
|
175 |
+
"metadata": {},
|
176 |
+
"outputs": [
|
177 |
+
{
|
178 |
+
"data": {
|
179 |
+
"text/plain": [
|
180 |
+
"dict"
|
181 |
+
]
|
182 |
+
},
|
183 |
+
"execution_count": 78,
|
184 |
+
"metadata": {},
|
185 |
+
"output_type": "execute_result"
|
186 |
+
}
|
187 |
+
],
|
188 |
+
"source": [
|
189 |
+
"type(json_res)"
|
190 |
+
]
|
191 |
+
},
|
192 |
+
{
|
193 |
+
"cell_type": "code",
|
194 |
+
"execution_count": 79,
|
195 |
+
"id": "64841177-feaa-4bd3-af09-d90b7656f620",
|
196 |
+
"metadata": {},
|
197 |
+
"outputs": [],
|
198 |
+
"source": [
|
199 |
+
"# # so whenever there is a job posting, we will extract this skills from the job \n",
|
200 |
+
"# # and we will match it with one or multiple of these technologies mentioned in the csv file and it will retrive those portfolio urls\n",
|
201 |
+
"# # which we will use while writing an email\n",
|
202 |
+
"\n",
|
203 |
+
"# import pandas as pd\n",
|
204 |
+
"\n",
|
205 |
+
"# df = pd.read_csv(\"my_portfolio.csv\")\n",
|
206 |
+
"# # df\n",
|
207 |
+
"\n",
|
208 |
+
"\n",
|
209 |
+
"# import uuid\n",
|
210 |
+
"# import chromadb\n",
|
211 |
+
"\n",
|
212 |
+
"# # when you use Client, it will create a chromadb in memory\n",
|
213 |
+
"# # but when we use PersistentClient it will create a chromadb on a disk ie it will be stored in our current folder so that we can retrive it anytime\n",
|
214 |
+
"# client = chromadb.PersistentClient('vectorstore')\n",
|
215 |
+
"# collection = client.get_or_create_collection(name=\"portfolio\")\n",
|
216 |
+
"\n",
|
217 |
+
"\n",
|
218 |
+
"# if not collection.count(): # this means if collection does not have any count ie if it is being created for the first time\n",
|
219 |
+
"# for _, row in df.iterrows(): # then you iterate through all your dataframe rows, and for each row, you are adding a document\n",
|
220 |
+
"# collection.add(documents=row[\"Techstack\"],\n",
|
221 |
+
"# metadatas={\"links\": row[\"Links\"]},\n",
|
222 |
+
"# ids=[str(uuid.uuid4())])"
|
223 |
+
]
|
224 |
+
},
|
225 |
+
{
|
226 |
+
"cell_type": "code",
|
227 |
+
"execution_count": 80,
|
228 |
+
"id": "f226adee-9126-42f8-a158-b87c8b991905",
|
229 |
+
"metadata": {},
|
230 |
+
"outputs": [],
|
231 |
+
"source": [
|
232 |
+
"job = json_res"
|
233 |
+
]
|
234 |
+
},
|
235 |
+
{
|
236 |
+
"cell_type": "code",
|
237 |
+
"execution_count": 81,
|
238 |
+
"id": "632eba44-054f-4c2b-803a-8e6c6f9f7232",
|
239 |
+
"metadata": {},
|
240 |
+
"outputs": [
|
241 |
+
{
|
242 |
+
"data": {
|
243 |
+
"text/plain": [
|
244 |
+
"{'name': 'Mandar Bhalerao',\n",
|
245 |
+
" 'education': [{'institution': 'Indian Institute of Science',\n",
|
246 |
+
" 'degree': 'Master of Technology - Computer Science and Automation',\n",
|
247 |
+
" 'cgpa': '7.30/10.0',\n",
|
248 |
+
" 'duration': 'Aug 2023 – Jul 2025',\n",
|
249 |
+
" 'location': 'Bangalore, Karnataka'},\n",
|
250 |
+
" {'institution': 'P.E.S. Modern College of Engineering',\n",
|
251 |
+
" 'degree': 'Bachelor of Engineering - Computer Engineering',\n",
|
252 |
+
" 'cgpa': '9.34/10.0',\n",
|
253 |
+
" 'duration': 'Aug 2019 – Jul 2023',\n",
|
254 |
+
" 'location': 'Pune, Maharashtra'}],\n",
|
255 |
+
" 'experience': [{'company': 'NeuroPixel.AI',\n",
|
256 |
+
" 'position': 'Deep Learning Research Intern',\n",
|
257 |
+
" 'duration': 'May 2024 – July 2024',\n",
|
258 |
+
" 'achievements': ['Worked on optimization of Stable Diffusion models to improve performance, achieving significant efficiency gains.',\n",
|
259 |
+
" 'Implemented the Hyper-SD framework to enhance image synthesis efficiency by Knowledge Distillation techniques.',\n",
|
260 |
+
" 'Combined the advantages of Trajectory Preserving and Reformulation Distillation techniques for faster inference.',\n",
|
261 |
+
" 'Trained a Control Net for SDXL, resulting in a 30% improvement of the inference steps from the base SDXL model.']},\n",
|
262 |
+
" {'company': 'Western Union',\n",
|
263 |
+
" 'position': 'Software Intern',\n",
|
264 |
+
" 'duration': 'Jan 2023 – Jun 2023',\n",
|
265 |
+
" 'achievements': ['Engaged with Quantum Metric to enrich powerful UX analysis, streamlining user experience design and functionality.',\n",
|
266 |
+
" 'Analyzed global transaction data, identified bugs, and implementing solutions that boosted conversion rates by 10%.']},\n",
|
267 |
+
" {'company': 'Amazon Web Services',\n",
|
268 |
+
" 'position': 'Intern',\n",
|
269 |
+
" 'duration': 'Oct 2021 – Dec 2021',\n",
|
270 |
+
" 'achievements': ['Acquired foundational skills in AWS, utilizing essential tools and services to support scalable cloud architectures.',\n",
|
271 |
+
" 'Completed the Solutions Architect Project and gained insights about the need of Cloud and AWS in today’s world.']}],\n",
|
272 |
+
" 'projects': [{'name': 'Gurgaon Real Estate Price Prediction',\n",
|
273 |
+
" 'technologies': 'Machine Learning, AWS',\n",
|
274 |
+
" 'achievements': ['Conducted data preprocessing, feature engineering, and performed EDA to optimize model performance.',\n",
|
275 |
+
" 'Experimented different models including Linear Regression, Decision Tree, Random Forest, XGBoost etc.',\n",
|
276 |
+
" 'Achieved a best R² score of 0.90 and a Mean Absolute Error (MAE) of 44 lakhs with the RandomForest model.',\n",
|
277 |
+
" 'Created a Geo-map for sectors in Gurgaon with color-coded pricing making it easy for the user to select property.',\n",
|
278 |
+
" 'Developed a dual-layer recommendation system to boost user engagement by suggesting top 5 properties to the user.',\n",
|
279 |
+
" 'Deployed the modules using Streamlit and AWS, enabling real-time access and interactive analytics for end-users.']},\n",
|
280 |
+
" {'name': 'Optimizing Performance of Dilated Convolution',\n",
|
281 |
+
" 'technologies': 'C++, CUDA',\n",
|
282 |
+
" 'achievements': ['Implemented different optimization methods to reduce the overall time required for Dialated Convolution.',\n",
|
283 |
+
" 'Optimized it using single threading and achieved a maximum improvement of 85.77%.',\n",
|
284 |
+
" 'Achieved a maximum improvement of 96% through multi-threading by changing the number of threads.',\n",
|
285 |
+
" 'Implemented it for a GPU using CUDA resulting in the speedup of 600.47 and improvement of 99.83%.']},\n",
|
286 |
+
" {'name': 'Movie Recommendation System',\n",
|
287 |
+
" 'technologies': 'Python',\n",
|
288 |
+
" 'achievements': ['Created an end to end Machine Learning project using Streamlit framework in Python and movies dataset from Kaggle.',\n",
|
289 |
+
" 'Developed a Content based Recommendation System using cosine similarity to analyze similarities among 5000 movies.',\n",
|
290 |
+
" 'Successfully deployed the application on Streamlit Community Cloud, enabling real-time user interactions and feedback.']}],\n",
|
291 |
+
" 'skills': {'languages': ['Python', 'C++'],\n",
|
292 |
+
" 'developerTools': ['VS Code', 'Jupyter Notebook', 'Google Colab'],\n",
|
293 |
+
" 'technical': ['Neural Networks',\n",
|
294 |
+
" 'Machine Learning',\n",
|
295 |
+
" 'Deep Learning',\n",
|
296 |
+
" 'Gen AI',\n",
|
297 |
+
" 'Natural Language Processing (NLP)']},\n",
|
298 |
+
" 'achievements': [{'position': 'Teaching Assistant',\n",
|
299 |
+
" 'course': 'UENG-101 Algorithms and Programming',\n",
|
300 |
+
" 'professors': ['Prof. Y.Narahari', 'Prof. Viraj Kumar']},\n",
|
301 |
+
" {'position': 'First Position',\n",
|
302 |
+
" 'competition': 'Chase The Py By CODEFIESTA 2022'},\n",
|
303 |
+
" {'position': 'Global Rank of 157',\n",
|
304 |
+
" 'competition': 'February Long Challenge at Codechef'},\n",
|
305 |
+
" {'position': 'Gold badges',\n",
|
306 |
+
" 'domains': ['Python', 'C++', 'Problem Solving Domain'],\n",
|
307 |
+
" 'platform': 'HackerRank'}]}"
|
308 |
+
]
|
309 |
+
},
|
310 |
+
"execution_count": 81,
|
311 |
+
"metadata": {},
|
312 |
+
"output_type": "execute_result"
|
313 |
+
}
|
314 |
+
],
|
315 |
+
"source": [
|
316 |
+
"job"
|
317 |
+
]
|
318 |
+
},
|
319 |
+
{
|
320 |
+
"cell_type": "code",
|
321 |
+
"execution_count": 82,
|
322 |
+
"id": "25ab0c96-8800-4102-8973-cbf42e68a11d",
|
323 |
+
"metadata": {},
|
324 |
+
"outputs": [],
|
325 |
+
"source": [
|
326 |
+
"# def generate_cold_email(details):\n",
|
327 |
+
"# # Extract name\n",
|
328 |
+
"# name = details.get('name', 'Candidate')\n",
|
329 |
+
"\n",
|
330 |
+
"# # Extract education details\n",
|
331 |
+
"# education_list = details.get('education', [])\n",
|
332 |
+
"# if education_list:\n",
|
333 |
+
"# education_details = ', '.join([f\"{edu.get('degree', 'Unknown degree')} from {edu.get('institution', 'Unknown institution')} ({edu.get('duration', 'Unknown duration')})\" for edu in education_list])\n",
|
334 |
+
"# else:\n",
|
335 |
+
"# education_details = 'No education details provided'\n",
|
336 |
+
"\n",
|
337 |
+
"# # Extract skills details\n",
|
338 |
+
"# skills_list = details.get('skills', [])\n",
|
339 |
+
"# if skills_list:\n",
|
340 |
+
"# skills_details = ', '.join([', '.join(skill.get('tools', [])) for skill in skills_list])\n",
|
341 |
+
"# else:\n",
|
342 |
+
"# skills_details = 'No skills listed'\n",
|
343 |
+
"\n",
|
344 |
+
"# # Extract experience details\n",
|
345 |
+
"# experience_list = details.get('experience', [])\n",
|
346 |
+
"# if experience_list:\n",
|
347 |
+
"# experience_details = ', '.join([f\"{exp.get('position', 'Unknown position')} at {exp.get('company', 'Unknown company')} ({exp.get('duration', 'Unknown duration')})\" for exp in experience_list])\n",
|
348 |
+
"# else:\n",
|
349 |
+
"# experience_details = 'No experience provided'\n",
|
350 |
+
"\n",
|
351 |
+
"# # Extract project details\n",
|
352 |
+
"# project_list = details.get('projects', [])\n",
|
353 |
+
"# if project_list:\n",
|
354 |
+
"# project_details = ', '.join([project.get('name', 'Unknown project') for project in project_list])\n",
|
355 |
+
"# else:\n",
|
356 |
+
"# project_details = 'No projects listed'\n",
|
357 |
+
"\n",
|
358 |
+
"# # Extract achievements details\n",
|
359 |
+
"# achievement_list = details.get('achievements', [])\n",
|
360 |
+
"# if achievement_list:\n",
|
361 |
+
"# achievement_details = ', '.join([f\"{achieve.get('position', 'Unknown position')} - {achieve.get('description', 'Unknown achievement')}\" for achieve in achievement_list])\n",
|
362 |
+
"# else:\n",
|
363 |
+
"# achievement_details = 'No achievements listed'\n",
|
364 |
+
"\n",
|
365 |
+
"# email_prompt = f\"\"\"\n",
|
366 |
+
"# You are {name}, educated at {education_details}. Your skills include {skills_details}.\n",
|
367 |
+
" \n",
|
368 |
+
"# You have experience as {experience_details}.\n",
|
369 |
+
" \n",
|
370 |
+
"# Some of your key projects include {project_details}.\n",
|
371 |
+
" \n",
|
372 |
+
"# Additionally, your achievements include {achievement_details}.\n",
|
373 |
+
" \n",
|
374 |
+
"# Write a cold email to a potential employer or client, showcasing your skills, education, projects, and achievements. Explain how your background makes you an ideal candidate for their needs.\n",
|
375 |
+
"\n",
|
376 |
+
"# Remember, you are {name}, ready to make a significant impact in your new role.\n",
|
377 |
+
"# \"\"\"\n",
|
378 |
+
"# return email_prompt\n"
|
379 |
+
]
|
380 |
+
},
|
381 |
+
{
|
382 |
+
"cell_type": "code",
|
383 |
+
"execution_count": 83,
|
384 |
+
"id": "102afc75-d182-4724-8261-6ff02c450f39",
|
385 |
+
"metadata": {},
|
386 |
+
"outputs": [],
|
387 |
+
"source": [
|
388 |
+
"# generate_cold_email(job)"
|
389 |
+
]
|
390 |
+
},
|
391 |
+
{
|
392 |
+
"cell_type": "code",
|
393 |
+
"execution_count": 84,
|
394 |
+
"id": "1513b4bf-a92b-4483-babd-7aa37ddb74c4",
|
395 |
+
"metadata": {},
|
396 |
+
"outputs": [],
|
397 |
+
"source": [
|
398 |
+
"# def generate_cold_email(details):\n",
|
399 |
+
"# # Extract name\n",
|
400 |
+
"# name = details.get('name', 'Candidate')\n",
|
401 |
+
"\n",
|
402 |
+
"# # Extract education details\n",
|
403 |
+
"# education_list = details.get('education', [])\n",
|
404 |
+
"# if education_list:\n",
|
405 |
+
"# education_details = ', '.join([f\"{edu.get('degree', 'Unknown degree')} from {edu.get('institution', 'Unknown institution')} ({edu.get('duration', 'Unknown duration')})\" for edu in education_list])\n",
|
406 |
+
"# else:\n",
|
407 |
+
"# education_details = 'No education details provided'\n",
|
408 |
+
"\n",
|
409 |
+
"# # Extract skills details\n",
|
410 |
+
"# skills_list = details.get('skills', [])\n",
|
411 |
+
"# if skills_list:\n",
|
412 |
+
"# skills_details = ', '.join([', '.join(skill.get('tools', [])) for skill in skills_list])\n",
|
413 |
+
"# else:\n",
|
414 |
+
"# skills_details = 'No skills listed'\n",
|
415 |
+
"\n",
|
416 |
+
"# # Extract experience details\n",
|
417 |
+
"# experience_list = details.get('experience', [])\n",
|
418 |
+
"# if experience_list:\n",
|
419 |
+
"# experience_details = []\n",
|
420 |
+
"# for exp in experience_list:\n",
|
421 |
+
"# position = exp.get('position', 'Unknown position')\n",
|
422 |
+
"# company = exp.get('company', 'Unknown company')\n",
|
423 |
+
"# duration = exp.get('duration', 'Unknown duration')\n",
|
424 |
+
"# achievements = exp.get('achievements', [])\n",
|
425 |
+
"# achievements_details = ', '.join(achievements) if achievements else 'No achievements mentioned'\n",
|
426 |
+
"# experience_details.append(f\"{position} at {company} ({duration}): {achievements_details}\")\n",
|
427 |
+
"# experience_details = ', '.join(experience_details)\n",
|
428 |
+
"# else:\n",
|
429 |
+
"# experience_details = 'No experience provided'\n",
|
430 |
+
"\n",
|
431 |
+
"# # Extract project details\n",
|
432 |
+
"# project_list = details.get('projects', [])\n",
|
433 |
+
"# if project_list:\n",
|
434 |
+
"# project_details = ', '.join([project.get('name', 'Unknown project') for project in project_list])\n",
|
435 |
+
"# else:\n",
|
436 |
+
"# project_details = 'No projects listed'\n",
|
437 |
+
"\n",
|
438 |
+
"# # Extract achievements details\n",
|
439 |
+
"# achievement_list = details.get('achievements', [])\n",
|
440 |
+
"# if achievement_list:\n",
|
441 |
+
"# achievement_details = ', '.join([f\"{achieve.get('position', 'Unknown position')} - {achieve.get('description', 'Unknown achievement')}\" for achieve in achievement_list])\n",
|
442 |
+
"# else:\n",
|
443 |
+
"# achievement_details = 'No achievements listed'\n",
|
444 |
+
"\n",
|
445 |
+
"# email_prompt = f\"\"\"\n",
|
446 |
+
"# You are {name}, educated at {education_details}. Your skills include {skills_details}.\n",
|
447 |
+
" \n",
|
448 |
+
"# You have experience as {experience_details}.\n",
|
449 |
+
" \n",
|
450 |
+
"# Some of your key projects include {project_details}.\n",
|
451 |
+
" \n",
|
452 |
+
"# Additionally, your achievements include {achievement_details}.\n",
|
453 |
+
" \n",
|
454 |
+
"# Write a cold email to a potential employer or client, showcasing your skills, education, experience (including responsibilities and achievements), projects, and achievements. Explain how your background makes you an ideal candidate for their needs.\n",
|
455 |
+
"\n",
|
456 |
+
"# Remember, you are {name}, ready to make a significant impact in your new role.\n",
|
457 |
+
"# \"\"\"\n",
|
458 |
+
"# return email_prompt\n"
|
459 |
+
]
|
460 |
+
},
|
461 |
+
{
|
462 |
+
"cell_type": "code",
|
463 |
+
"execution_count": 86,
|
464 |
+
"id": "8c13b01b-1715-452e-ba64-22ab7aac92ce",
|
465 |
+
"metadata": {},
|
466 |
+
"outputs": [],
|
467 |
+
"source": [
|
468 |
+
"# generate_cold_email(job)\n",
|
469 |
+
"# "
|
470 |
+
]
|
471 |
+
},
|
472 |
+
{
|
473 |
+
"cell_type": "code",
|
474 |
+
"execution_count": 87,
|
475 |
+
"id": "afc38bb5-0aef-491f-ae17-ef665e89544a",
|
476 |
+
"metadata": {},
|
477 |
+
"outputs": [],
|
478 |
+
"source": [
|
479 |
+
"def generate_cold_email(details):\n",
|
480 |
+
" # Extract name\n",
|
481 |
+
" name = details.get('name', 'Candidate')\n",
|
482 |
+
"\n",
|
483 |
+
" # Extract education details\n",
|
484 |
+
" education_list = details.get('education', [])\n",
|
485 |
+
" if education_list:\n",
|
486 |
+
" education_details = ', '.join([f\"{edu.get('degree', 'Unknown degree')} from {edu.get('institution', 'Unknown institution')} ({edu.get('duration', 'Unknown duration')})\" for edu in education_list])\n",
|
487 |
+
" else:\n",
|
488 |
+
" education_details = 'No education details provided'\n",
|
489 |
+
"\n",
|
490 |
+
" # Extract skills details\n",
|
491 |
+
" skills_list = details.get('skills', [])\n",
|
492 |
+
" if skills_list:\n",
|
493 |
+
" skills_details = ', '.join([', '.join(skill.get('tools', [])) for skill in skills_list])\n",
|
494 |
+
" else:\n",
|
495 |
+
" skills_details = 'No skills listed'\n",
|
496 |
+
"\n",
|
497 |
+
" # Extract experience details\n",
|
498 |
+
" experience_list = details.get('experience', [])\n",
|
499 |
+
" if experience_list:\n",
|
500 |
+
" experience_details = []\n",
|
501 |
+
" for exp in experience_list:\n",
|
502 |
+
" position = exp.get('position', 'Unknown position')\n",
|
503 |
+
" company = exp.get('company', 'Unknown company')\n",
|
504 |
+
" duration = exp.get('duration', 'Unknown duration')\n",
|
505 |
+
" achievements = exp.get('achievements', [])\n",
|
506 |
+
" achievements_details = ', '.join(achievements) if achievements else 'No achievements mentioned'\n",
|
507 |
+
" experience_details.append(f\"{position} at {company} ({duration}): {achievements_details}\")\n",
|
508 |
+
" experience_details = ', '.join(experience_details)\n",
|
509 |
+
" else:\n",
|
510 |
+
" experience_details = 'No experience provided'\n",
|
511 |
+
"\n",
|
512 |
+
" # Extract project details\n",
|
513 |
+
" project_list = details.get('projects', [])\n",
|
514 |
+
" if project_list:\n",
|
515 |
+
" project_details = []\n",
|
516 |
+
" for project in project_list:\n",
|
517 |
+
" project_name = project.get('name', 'Unknown project')\n",
|
518 |
+
" project_description = ', '.join(project.get('description', [])) if project.get('description') else 'No details provided'\n",
|
519 |
+
" project_details.append(f\"{project_name}: {project_description}\")\n",
|
520 |
+
" project_details = ', '.join(project_details)\n",
|
521 |
+
" else:\n",
|
522 |
+
" project_details = 'No projects listed'\n",
|
523 |
+
"\n",
|
524 |
+
" # Extract achievements details\n",
|
525 |
+
" achievement_list = details.get('achievements', [])\n",
|
526 |
+
" if achievement_list:\n",
|
527 |
+
" achievement_details = ', '.join([f\"{achieve.get('position', 'Unknown position')} - {achieve.get('description', 'Unknown achievement')}\" for achieve in achievement_list])\n",
|
528 |
+
" else:\n",
|
529 |
+
" achievement_details = 'No achievements listed'\n",
|
530 |
+
"\n",
|
531 |
+
" email_prompt = f\"\"\"\n",
|
532 |
+
" You are {name}, educated at {education_details}. Your skills include {skills_details}.\n",
|
533 |
+
" \n",
|
534 |
+
" You have experience as {experience_details}.\n",
|
535 |
+
" \n",
|
536 |
+
" Some of your key projects include {project_details}.\n",
|
537 |
+
" \n",
|
538 |
+
" Additionally, your achievements include {achievement_details}.\n",
|
539 |
+
" \n",
|
540 |
+
" Write a cold email to a potential employer or client, showcasing your skills, education, experience (including responsibilities and achievements), projects (with descriptions), and achievements. Explain how your background makes you an ideal candidate for their needs.\n",
|
541 |
+
"\n",
|
542 |
+
" Remember, you are {name}, ready to make a significant impact in your new role.\n",
|
543 |
+
" \"\"\"\n",
|
544 |
+
" return email_prompt\n"
|
545 |
+
]
|
546 |
+
},
|
547 |
+
{
|
548 |
+
"cell_type": "code",
|
549 |
+
"execution_count": 89,
|
550 |
+
"id": "8c4555f3-f98c-4951-9c40-81a2c779206c",
|
551 |
+
"metadata": {},
|
552 |
+
"outputs": [
|
553 |
+
{
|
554 |
+
"ename": "AttributeError",
|
555 |
+
"evalue": "'str' object has no attribute 'get'",
|
556 |
+
"output_type": "error",
|
557 |
+
"traceback": [
|
558 |
+
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
|
559 |
+
"\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)",
|
560 |
+
"Cell \u001b[1;32mIn[89], line 1\u001b[0m\n\u001b[1;32m----> 1\u001b[0m \u001b[43mgenerate_cold_email\u001b[49m\u001b[43m(\u001b[49m\u001b[43mjob\u001b[49m\u001b[43m)\u001b[49m\n",
|
561 |
+
"Cell \u001b[1;32mIn[87], line 15\u001b[0m, in \u001b[0;36mgenerate_cold_email\u001b[1;34m(details)\u001b[0m\n\u001b[0;32m 13\u001b[0m skills_list \u001b[38;5;241m=\u001b[39m details\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mskills\u001b[39m\u001b[38;5;124m'\u001b[39m, [])\n\u001b[0;32m 14\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m skills_list:\n\u001b[1;32m---> 15\u001b[0m skills_details \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin([\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(skill\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtools\u001b[39m\u001b[38;5;124m'\u001b[39m, [])) \u001b[38;5;28;01mfor\u001b[39;00m skill \u001b[38;5;129;01min\u001b[39;00m skills_list])\n\u001b[0;32m 16\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 17\u001b[0m skills_details \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mNo skills listed\u001b[39m\u001b[38;5;124m'\u001b[39m\n",
|
562 |
+
"Cell \u001b[1;32mIn[87], line 15\u001b[0m, in \u001b[0;36m<listcomp>\u001b[1;34m(.0)\u001b[0m\n\u001b[0;32m 13\u001b[0m skills_list \u001b[38;5;241m=\u001b[39m details\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mskills\u001b[39m\u001b[38;5;124m'\u001b[39m, [])\n\u001b[0;32m 14\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m skills_list:\n\u001b[1;32m---> 15\u001b[0m skills_details \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin([\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(\u001b[43mskill\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtools\u001b[39m\u001b[38;5;124m'\u001b[39m, [])) \u001b[38;5;28;01mfor\u001b[39;00m skill \u001b[38;5;129;01min\u001b[39;00m skills_list])\n\u001b[0;32m 16\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 17\u001b[0m skills_details \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mNo skills listed\u001b[39m\u001b[38;5;124m'\u001b[39m\n",
|
563 |
+
"\u001b[1;31mAttributeError\u001b[0m: 'str' object has no attribute 'get'"
|
564 |
+
]
|
565 |
+
}
|
566 |
+
],
|
567 |
+
"source": [
|
568 |
+
"generate_cold_email(job)"
|
569 |
+
]
|
570 |
+
},
|
571 |
+
{
|
572 |
+
"cell_type": "code",
|
573 |
+
"execution_count": 88,
|
574 |
+
"id": "010e4425-8b74-4052-8a72-5c3bb0614847",
|
575 |
+
"metadata": {},
|
576 |
+
"outputs": [
|
577 |
+
{
|
578 |
+
"ename": "AttributeError",
|
579 |
+
"evalue": "'str' object has no attribute 'get'",
|
580 |
+
"output_type": "error",
|
581 |
+
"traceback": [
|
582 |
+
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
|
583 |
+
"\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)",
|
584 |
+
"Cell \u001b[1;32mIn[88], line 1\u001b[0m\n\u001b[1;32m----> 1\u001b[0m resume_details \u001b[38;5;241m=\u001b[39m \u001b[43mgenerate_cold_email\u001b[49m\u001b[43m(\u001b[49m\u001b[43mjob\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;28mtype\u001b[39m(resume_details)\n",
|
585 |
+
"Cell \u001b[1;32mIn[87], line 15\u001b[0m, in \u001b[0;36mgenerate_cold_email\u001b[1;34m(details)\u001b[0m\n\u001b[0;32m 13\u001b[0m skills_list \u001b[38;5;241m=\u001b[39m details\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mskills\u001b[39m\u001b[38;5;124m'\u001b[39m, [])\n\u001b[0;32m 14\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m skills_list:\n\u001b[1;32m---> 15\u001b[0m skills_details \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin([\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(skill\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtools\u001b[39m\u001b[38;5;124m'\u001b[39m, [])) \u001b[38;5;28;01mfor\u001b[39;00m skill \u001b[38;5;129;01min\u001b[39;00m skills_list])\n\u001b[0;32m 16\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 17\u001b[0m skills_details \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mNo skills listed\u001b[39m\u001b[38;5;124m'\u001b[39m\n",
|
586 |
+
"Cell \u001b[1;32mIn[87], line 15\u001b[0m, in \u001b[0;36m<listcomp>\u001b[1;34m(.0)\u001b[0m\n\u001b[0;32m 13\u001b[0m skills_list \u001b[38;5;241m=\u001b[39m details\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mskills\u001b[39m\u001b[38;5;124m'\u001b[39m, [])\n\u001b[0;32m 14\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m skills_list:\n\u001b[1;32m---> 15\u001b[0m skills_details \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin([\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(\u001b[43mskill\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtools\u001b[39m\u001b[38;5;124m'\u001b[39m, [])) \u001b[38;5;28;01mfor\u001b[39;00m skill \u001b[38;5;129;01min\u001b[39;00m skills_list])\n\u001b[0;32m 16\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 17\u001b[0m skills_details \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mNo skills listed\u001b[39m\u001b[38;5;124m'\u001b[39m\n",
|
587 |
+
"\u001b[1;31mAttributeError\u001b[0m: 'str' object has no attribute 'get'"
|
588 |
+
]
|
589 |
+
}
|
590 |
+
],
|
591 |
+
"source": [
|
592 |
+
"resume_details = generate_cold_email(job)\n",
|
593 |
+
"type(resume_details)"
|
594 |
+
]
|
595 |
+
},
|
596 |
+
{
|
597 |
+
"cell_type": "code",
|
598 |
+
"execution_count": 60,
|
599 |
+
"id": "be7377e8-166e-452e-966c-64df9df46462",
|
600 |
+
"metadata": {},
|
601 |
+
"outputs": [],
|
602 |
+
"source": [
|
603 |
+
"# job"
|
604 |
+
]
|
605 |
+
},
|
606 |
+
{
|
607 |
+
"cell_type": "code",
|
608 |
+
"execution_count": 57,
|
609 |
+
"id": "237becf8-2351-40c4-a343-0eaf61e10230",
|
610 |
+
"metadata": {},
|
611 |
+
"outputs": [],
|
612 |
+
"source": [
|
613 |
+
"# this is prompt template for writing an email\n",
|
614 |
+
"\n",
|
615 |
+
"prompt_email = PromptTemplate.from_template(\n",
|
616 |
+
" \"\"\"\n",
|
617 |
+
" ### JOB DESCRIPTION:\n",
|
618 |
+
" {job_description}\n",
|
619 |
+
"\n",
|
620 |
+
" ### INSTRUCTION:\n",
|
621 |
+
" Introduce yourself from the below details\n",
|
622 |
+
" {resume_details}\n",
|
623 |
+
" End the email with Name and Designation. \n",
|
624 |
+
" Do not provide a preamble.\n",
|
625 |
+
" ### EMAIL (NO PREAMBLE):\n",
|
626 |
+
"\n",
|
627 |
+
" \"\"\"\n",
|
628 |
+
" )"
|
629 |
+
]
|
630 |
+
},
|
631 |
+
{
|
632 |
+
"cell_type": "code",
|
633 |
+
"execution_count": 58,
|
634 |
+
"id": "0833b02e-dc85-4595-86c5-c69096fc1a2b",
|
635 |
+
"metadata": {},
|
636 |
+
"outputs": [
|
637 |
+
{
|
638 |
+
"name": "stdout",
|
639 |
+
"output_type": "stream",
|
640 |
+
"text": [
|
641 |
+
"Subject: Expertise in AI, Machine Learning, and Deep Learning for Innovative Solutions\n",
|
642 |
+
"\n",
|
643 |
+
"Dear Hiring Manager,\n",
|
644 |
+
"\n",
|
645 |
+
"I am Mandar Bhalerao, a highly motivated and skilled professional with a strong educational background in Computer Science and Automation. I am excited to introduce myself as a potential candidate for a role that leverages my expertise in AI, Machine Learning, and Deep Learning.\n",
|
646 |
+
"\n",
|
647 |
+
"With a Master of Technology degree in Computer Science and Automation from the Indian Institute of Science (Aug 2023 – Jul 2025) and a Bachelor of Engineering degree in Computer Engineering from P.E.S. Modern College of Engineering (Aug 2019 – Jul 2023), I possess a solid foundation in computer science and software development. My technical skills include proficiency in Python, C++, VS Code, Jupyter Notebook, Google Colab, Neural Networks, Machine Learning, Deep Learning, Gen AI, and Natural Language Processing (NLP).\n",
|
648 |
+
"\n",
|
649 |
+
"As a Deep Learning Research Intern at NeuroPixel.AI (May 2024 – July 2024), I worked on optimizing Stable Diffusion models to improve performance, achieving significant efficiency gains. I implemented the Hyper-SD framework to enhance image synthesis efficiency by Knowledge Distillation techniques and combined the advantages of Trajectory Preserving and Reformulation Distillation techniques for faster inference. I also trained a Control Net for SDXL, resulting in a 30% improvement of the inference steps from the base SDXL model.\n",
|
650 |
+
"\n",
|
651 |
+
"As a Software Intern at Western Union (Jan 2023 – Jun 2023), I engaged with Quantum Metric to enrich powerful UX analysis, streamlining user experience design and functionality. I analyzed global transaction data, identified bugs, and implemented solutions that boosted conversion rates by 10%. As an Intern at Amazon Web Services (Oct 2021 – Dec 2021), I acquired foundational skills in AWS, utilizing essential tools and services to support scalable cloud architectures. I completed the Solutions Architect Project and gained insights about the need of Cloud and AWS in today’s world.\n",
|
652 |
+
"\n",
|
653 |
+
"Some of my notable projects include:\n",
|
654 |
+
"\n",
|
655 |
+
"* Gurgaon Real Estate Price Prediction: I conducted data preprocessing, feature engineering, and performed EDA to optimize model performance. I experimented with different models, including Linear Regression, Decision Tree, Random Forest, XGBoost, and achieved a best R² score of 0.90 and a Mean Absolute Error (MAE) of 44 lakhs with the RandomForest model. I created a Geo-map for sectors in Gurgaon with color-coded pricing, making it easy for users to select properties. I developed a dual-layer recommendation system to boost user engagement by suggesting top 5 properties to users and deployed the modules using Streamlit and AWS.\n",
|
656 |
+
"* Optimizing Performance of Dilated Convolution: I implemented different optimization methods to reduce the overall time required for Dialated Convolution. I optimized it using single threading and achieved a maximum improvement of 85.77%. I achieved a maximum improvement of 96% through multi-threading by changing the number of threads. I implemented it for a GPU using CUDA, resulting in a speedup of 600.47 and an improvement of 99.83%.\n",
|
657 |
+
"* Movie Recommendation System: I created an end-to-end Machine Learning project using the Streamlit framework in Python and the movies dataset from Kaggle. I developed a Content-based Recommendation System using cosine similarity to analyze similarities among 5000 movies. I successfully deployed the application on Streamlit Community Cloud, enabling real-time user interactions and feedback.\n",
|
658 |
+
"\n",
|
659 |
+
"My achievements include:\n",
|
660 |
+
"\n",
|
661 |
+
"* Teaching Assistant for “UENG-101 Algorithms and Programming” by Prof. Y.Narahari and Prof. Viraj Kumar\n",
|
662 |
+
"* First Position in Chase The Py By CODEFIESTA 2022\n",
|
663 |
+
"* Global Rank of 157 in February Long Challenge at Codechef\n",
|
664 |
+
"* Gold badges in Python, C++, and Problem Solving Domain on HackerRank\n",
|
665 |
+
"\n",
|
666 |
+
"With my strong educational background, technical skills, and experience in AI, Machine Learning, and Deep Learning, I am confident that I can make a significant impact in your organization. I am excited about the opportunity to discuss my qualifications further and explore how my skills align with your needs.\n",
|
667 |
+
"\n",
|
668 |
+
"Mandar Bhalerao\n",
|
669 |
+
"AI/ML Engineer & Deep Learning Researcher\n"
|
670 |
+
]
|
671 |
+
}
|
672 |
+
],
|
673 |
+
"source": [
|
674 |
+
"# again creating a chain of prompt_email and llm\n",
|
675 |
+
"# invoking the chain by passing the parameter of job_description and link_list\n",
|
676 |
+
"\n",
|
677 |
+
"chain_email = prompt_email | llm\n",
|
678 |
+
"res = chain_email.invoke({\"job_description\": str(job), \"resume_details\": resume_details})\n",
|
679 |
+
"print(res.content)"
|
680 |
+
]
|
681 |
+
},
|
682 |
+
{
|
683 |
+
"cell_type": "code",
|
684 |
+
"execution_count": null,
|
685 |
+
"id": "a0efcf9c-8936-4d7a-9a8a-0c1121f12caf",
|
686 |
+
"metadata": {},
|
687 |
+
"outputs": [],
|
688 |
+
"source": []
|
689 |
+
}
|
690 |
+
],
|
691 |
+
"metadata": {
|
692 |
+
"kernelspec": {
|
693 |
+
"display_name": "Python 3 (ipykernel)",
|
694 |
+
"language": "python",
|
695 |
+
"name": "python3"
|
696 |
+
},
|
697 |
+
"language_info": {
|
698 |
+
"codemirror_mode": {
|
699 |
+
"name": "ipython",
|
700 |
+
"version": 3
|
701 |
+
},
|
702 |
+
"file_extension": ".py",
|
703 |
+
"mimetype": "text/x-python",
|
704 |
+
"name": "python",
|
705 |
+
"nbconvert_exporter": "python",
|
706 |
+
"pygments_lexer": "ipython3",
|
707 |
+
"version": "3.10.7"
|
708 |
+
}
|
709 |
+
},
|
710 |
+
"nbformat": 4,
|
711 |
+
"nbformat_minor": 5
|
712 |
+
}
|
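The AttributeError traced in the cells above comes from the shape of the extracted data: the `skills` value returned by the resume-extraction chain is a dict keyed by 'languages', 'developerTools' and 'technical' (see the `job` output earlier), so iterating over it yields plain strings and `skill.get('tools')` fails. Below is a small defensive sketch of the skills-extraction step; `extract_skills` is a hypothetical helper for illustration, not a function from this repo.

def extract_skills(details):
    """Collect skill names whether `skills` is a dict of lists or a list of dicts."""
    skills = details.get('skills', {})
    collected = []
    if isinstance(skills, dict):
        # Shape actually produced by the extraction chain above, e.g.
        # {'languages': ['Python', 'C++'], 'technical': ['Machine Learning', ...]}
        for values in skills.values():
            collected.extend(values if isinstance(values, list) else [str(values)])
    elif isinstance(skills, list):
        # Alternative shape the original code expected: [{'tools': [...]}, ...]
        for skill in skills:
            if isinstance(skill, dict):
                collected.extend(skill.get('tools', []))
            else:
                collected.append(str(skill))
    return ', '.join(collected) if collected else 'No skills listed'

# Example with the dict shape seen in the notebook output:
details = {'skills': {'languages': ['Python', 'C++'], 'technical': ['Machine Learning']}}
print(extract_skills(details))  # -> Python, C++, Machine Learning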
final_mandar.ipynb
ADDED
@@ -0,0 +1,1472 @@
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 1,
|
6 |
+
"id": "e87b4746-6622-4e21-9788-2df1616d0036",
|
7 |
+
"metadata": {},
|
8 |
+
"outputs": [],
|
9 |
+
"source": [
|
10 |
+
"import pdfplumber\n",
|
11 |
+
"import re\n",
|
12 |
+
"from langchain_groq import ChatGroq"
|
13 |
+
]
|
14 |
+
},
|
15 |
+
{
|
16 |
+
"cell_type": "code",
|
17 |
+
"execution_count": 2,
|
18 |
+
"id": "60f310f1-3ba0-4853-a515-da664eefd907",
|
19 |
+
"metadata": {},
|
20 |
+
"outputs": [],
|
21 |
+
"source": [
|
22 |
+
"llm = ChatGroq(\n",
|
23 |
+
" temperature=0, \n",
|
24 |
+
" groq_api_key='your_api_key_here', \n",
|
25 |
+
" model_name=\"llama-3.1-70b-versatile\"\n",
|
26 |
+
")"
|
27 |
+
]
|
28 |
+
},
|
29 |
+
{
|
30 |
+
"cell_type": "code",
|
31 |
+
"execution_count": 3,
|
32 |
+
"id": "6f55fd44-52fd-40a2-94bd-5c4e33683c52",
|
33 |
+
"metadata": {},
|
34 |
+
"outputs": [
|
35 |
+
{
|
36 |
+
"name": "stderr",
|
37 |
+
"output_type": "stream",
|
38 |
+
"text": [
|
39 |
+
"USER_AGENT environment variable not set, consider setting it to identify your requests.\n"
|
40 |
+
]
|
41 |
+
},
|
42 |
+
{
|
43 |
+
"name": "stdout",
|
44 |
+
"output_type": "stream",
|
45 |
+
"text": [
|
46 |
+
"\n",
|
47 |
+
"\n",
|
48 |
+
"\n",
|
49 |
+
"\n",
|
50 |
+
"\n",
|
51 |
+
"\n",
|
52 |
+
"\n",
|
53 |
+
"\n",
|
54 |
+
"\n",
|
55 |
+
"\n",
|
56 |
+
"\n",
|
57 |
+
"\n",
|
58 |
+
"\n",
|
59 |
+
"\n",
|
60 |
+
"\n",
|
61 |
+
"\n",
|
62 |
+
"\n",
|
63 |
+
"\n",
|
64 |
+
"\n",
|
65 |
+
"\n",
|
66 |
+
"\n",
|
67 |
+
"\n",
|
68 |
+
"\n",
|
69 |
+
"\n",
|
70 |
+
"\n",
|
71 |
+
"\n",
|
72 |
+
"\n",
|
73 |
+
"\n",
|
74 |
+
"\n",
|
75 |
+
"\n",
|
76 |
+
"\n",
|
77 |
+
"\n",
|
78 |
+
"\n",
|
79 |
+
"Data Scientist\n",
|
80 |
+
"\n",
|
81 |
+
"\n",
|
82 |
+
"\n",
|
83 |
+
"\n",
|
84 |
+
"\n",
|
85 |
+
"\n",
|
86 |
+
"\n",
|
87 |
+
"\n",
|
88 |
+
"\n",
|
89 |
+
"\n",
|
90 |
+
"\n",
|
91 |
+
"\n",
|
92 |
+
"\n",
|
93 |
+
"\n",
|
94 |
+
"\n",
|
95 |
+
"\n",
|
96 |
+
"\n",
|
97 |
+
"\n",
|
98 |
+
"\n",
|
99 |
+
"\n",
|
100 |
+
"\n",
|
101 |
+
"\n",
|
102 |
+
"\n",
|
103 |
+
"\n",
|
104 |
+
"\n",
|
105 |
+
"About\n",
|
106 |
+
"Alum\n",
|
107 |
+
"Inclusion\n",
|
108 |
+
"Careers\n",
|
109 |
+
"Culture\n",
|
110 |
+
"Blog\n",
|
111 |
+
"Tech\n",
|
112 |
+
"\n",
|
113 |
+
"\n",
|
114 |
+
"\n",
|
115 |
+
"\n",
|
116 |
+
"\n",
|
117 |
+
"\n",
|
118 |
+
"\n",
|
119 |
+
"\n",
|
120 |
+
"\n",
|
121 |
+
"\n",
|
122 |
+
"Data Scientist\n",
|
123 |
+
"Bengaluru\n",
|
124 |
+
"\n",
|
125 |
+
"\n",
|
126 |
+
"\n",
|
127 |
+
"\n",
|
128 |
+
"\n",
|
129 |
+
"\n",
|
130 |
+
"\n",
|
131 |
+
"\n",
|
132 |
+
"Share\n",
|
133 |
+
"\n",
|
134 |
+
"\n",
|
135 |
+
"\n",
|
136 |
+
"\n",
|
137 |
+
"\n",
|
138 |
+
"\n",
|
139 |
+
"\n",
|
140 |
+
"\n",
|
141 |
+
"\n",
|
142 |
+
"Apply\n",
|
143 |
+
"\n",
|
144 |
+
"\n",
|
145 |
+
"\n",
|
146 |
+
"About Team\n",
|
147 |
+
"Myntra Data Science team delivers a large number of data science solutions for the company which are deployed at various customer touch points every quarter. The models create significant revenue and customer experience impact. The models involve real-time, near-real-time and offline solutions with varying latency requirements. The models are built using massive datasets. You will have the opportunity to be part of a rapidly growing organization and gain exposure to all the parts of a comprehensive ecommerce platform. You’ll also get to learn the intricacies of building models that serve millions of requests per second at sub second latency. \n",
|
148 |
+
"The team takes pride in deploying solutions that not only leverage state of the art machine learning models like graph neural networks, diffusion models, transformers, representation learning, optimization methods and bayesian modeling but also contribute to research literature with multiple peer-reviewed research papers.\n",
|
149 |
+
"Roles and Responsibilities\n",
|
150 |
+
"\n",
|
151 |
+
"Design, develop and deploy machine learning models,algorithms and systems to solve complex business problems for Myntra Recsys, Search, Vision, SCM, Pricing, Forecasting, Trend and Virality prediction, Gen AI and other areas\n",
|
152 |
+
"Theoretical understanding and practise of machine learning and expertise in one or more of the topics, such as, NLP, Computer Vision, recommender systems and Optimisation. \n",
|
153 |
+
"Implement robust and reliable software solutions for model deployment.\n",
|
154 |
+
"Support the team in maintaining machine learning pipelines, contributing to tasks like data cleaning, feature extraction and basic model training.\n",
|
155 |
+
"Participate in monitoring the performance of machine learning models, gaining experience in using statistical methods for evaluation.\n",
|
156 |
+
"Working with the Data Platforms teams for understanding and collecting the data.\n",
|
157 |
+
"Conduct performance testing, troubleshooting and tuning as required.\n",
|
158 |
+
"Stay current with the latest research and technology and communicate your knowledge throughout the enterprise.\n",
|
159 |
+
"\n",
|
160 |
+
"Qualifications & Experience\n",
|
161 |
+
"\n",
|
162 |
+
"Master’s/PhD in Computer Science, Mathematics, Statistics/related fields ‘or’ 1+ years of relevant industry experience with a Bachelor’s degree.\n",
|
163 |
+
"Proficiency in Python or one other high-level programming language.\n",
|
164 |
+
"Theoretical understanding of statistical models such as regression, clustering and ML algorithms such as decision trees, neural networks, etc.\n",
|
165 |
+
"Strong written and verbal communication skills\n",
|
166 |
+
"Intellectual curiosity and enthusiastic about continuous learning\n",
|
167 |
+
"Experience developing machine learning models in Python, or equivalent programming language.\n",
|
168 |
+
"Basic familiarity with machine learning frameworks like TensorFlow, PyTorch, or scikit-learn.\n",
|
169 |
+
"Introductory understanding of statistics as it applies to machine learning.\n",
|
170 |
+
"Ability to manage and prioritize your workload and support his/her manager.\n",
|
171 |
+
"Experience with SQL and/or NoSQL databases.\n",
|
172 |
+
"If you are an exceptional candidate, write in. We are happy to hire you even if you don't have the certified qualifications.\n",
|
173 |
+
"\n",
|
174 |
+
"Nice to Have:\n",
|
175 |
+
"\n",
|
176 |
+
"Publications or presentations in recognized Machine Learning and Data Science journals/conferences.\n",
|
177 |
+
"Experience with ML orchestration tools (Airflow, Kubeflow or MLFlow)\n",
|
178 |
+
"Exposure to GenAI models.\n",
|
179 |
+
"\n",
|
180 |
+
" \n",
|
181 |
+
"\n",
|
182 |
+
"\n",
|
183 |
+
"\n",
|
184 |
+
"\n",
|
185 |
+
"\n",
|
186 |
+
"\n",
|
187 |
+
"\n",
|
188 |
+
"\n",
|
189 |
+
"×\n",
|
190 |
+
"\n",
|
191 |
+
"\n",
|
192 |
+
"\n",
|
193 |
+
"\n",
|
194 |
+
"Apply now\n",
|
195 |
+
"\n",
|
196 |
+
"\n",
|
197 |
+
"\n",
|
198 |
+
"\n",
|
199 |
+
"Name *\n",
|
200 |
+
"\n",
|
201 |
+
"\n",
|
202 |
+
"\n",
|
203 |
+
"\n",
|
204 |
+
"\n",
|
205 |
+
"Last Name *\n",
|
206 |
+
"\n",
|
207 |
+
"\n",
|
208 |
+
"\n",
|
209 |
+
"\n",
|
210 |
+
"\n",
|
211 |
+
"Your Email *\n",
|
212 |
+
"\n",
|
213 |
+
"\n",
|
214 |
+
"\n",
|
215 |
+
"\n",
|
216 |
+
"\n",
|
217 |
+
"Phone *\n",
|
218 |
+
"\n",
|
219 |
+
"\n",
|
220 |
+
"\n",
|
221 |
+
"\n",
|
222 |
+
"\n",
|
223 |
+
"Your current location *\n",
|
224 |
+
"\n",
|
225 |
+
"\n",
|
226 |
+
"\n",
|
227 |
+
"\n",
|
228 |
+
"\n",
|
229 |
+
"Resume/CV *\n",
|
230 |
+
"\n",
|
231 |
+
"\n",
|
232 |
+
"Attach\n",
|
233 |
+
"\n",
|
234 |
+
"×\n",
|
235 |
+
"\n",
|
236 |
+
"\n",
|
237 |
+
"\n",
|
238 |
+
"Cover Letter\n",
|
239 |
+
"\n",
|
240 |
+
"\n",
|
241 |
+
"Attach\n",
|
242 |
+
"Paste\n",
|
243 |
+
"\n",
|
244 |
+
"×\n",
|
245 |
+
"\n",
|
246 |
+
"\n",
|
247 |
+
"\n",
|
248 |
+
"\n",
|
249 |
+
"\n",
|
250 |
+
"Submit \n",
|
251 |
+
"\n",
|
252 |
+
"\n",
|
253 |
+
"\n",
|
254 |
+
"\n",
|
255 |
+
"\n",
|
256 |
+
"\n",
|
257 |
+
"\n",
|
258 |
+
"\n",
|
259 |
+
"\n",
|
260 |
+
"\n",
|
261 |
+
"\n",
|
262 |
+
"\n",
|
263 |
+
"We got your Appliaction, our team will get back to you soon.\n",
|
264 |
+
"\n",
|
265 |
+
"\n",
|
266 |
+
"\n",
|
267 |
+
"\n",
|
268 |
+
"\n",
|
269 |
+
"\n",
|
270 |
+
"\n",
|
271 |
+
"\n",
|
272 |
+
"\n",
|
273 |
+
"\n",
|
274 |
+
"\n",
|
275 |
+
"Looks like the application has not uploaded, Please try agin.\n",
|
276 |
+
"\n",
|
277 |
+
"\n",
|
278 |
+
"\n",
|
279 |
+
"\n",
|
280 |
+
"\n",
|
281 |
+
"\n",
|
282 |
+
"\n",
|
283 |
+
"\n",
|
284 |
+
"\n",
|
285 |
+
"\n",
|
286 |
+
"\n",
|
287 |
+
"\n",
|
288 |
+
"\n",
|
289 |
+
"\n",
|
290 |
+
"\n",
|
291 |
+
"Bengaluru (HQ)\n",
|
292 |
+
"\n",
|
293 |
+
"gurgaon\n",
|
294 |
+
"\n",
|
295 |
+
"Mumbai\n",
|
296 |
+
"\n",
|
297 |
+
"\n",
|
298 |
+
"\n",
|
299 |
+
"\n",
|
300 |
+
"\n",
|
301 |
+
"\n",
|
302 |
+
"contact\n",
|
303 |
+
"Shop\n",
|
304 |
+
"Careers\n",
|
305 |
+
"Privacy Policy\n",
|
306 |
+
"Terms & Conditions\n",
|
307 |
+
"\n",
|
308 |
+
"\n",
|
309 |
+
"Myntra is proud to be an Equal Opportunity Employer\n",
|
310 |
+
"\n",
|
311 |
+
"\n",
|
312 |
+
"© 2019 www.myntra.com. All rights reserved.\n",
|
313 |
+
"\n",
|
314 |
+
"\n",
|
315 |
+
"\n",
|
316 |
+
"\n",
|
317 |
+
"\n",
|
318 |
+
"\n",
|
319 |
+
"\n",
|
320 |
+
"\n",
|
321 |
+
"\n",
|
322 |
+
"\n",
|
323 |
+
"\n",
|
324 |
+
"\n",
|
325 |
+
"\n",
|
326 |
+
"\n",
|
327 |
+
"\n",
|
328 |
+
"\n",
|
329 |
+
"\n",
|
330 |
+
"\n",
|
331 |
+
"\n",
|
332 |
+
"\n",
|
333 |
+
"\n",
|
334 |
+
"\n",
|
335 |
+
"\n",
|
336 |
+
"\n"
|
337 |
+
]
|
338 |
+
}
|
339 |
+
],
|
340 |
+
"source": [
|
341 |
+
"# WebBaseLoader will accept the url and extract the data from that, ie web scraping\n",
|
342 |
+
"\n",
|
343 |
+
"from langchain_community.document_loaders import WebBaseLoader\n",
|
344 |
+
"\n",
|
345 |
+
"loader = WebBaseLoader(\"https://careers.myntra.com/job-detail/?id=7431200002\")\n",
|
346 |
+
"page_data = loader.load().pop().page_content\n",
|
347 |
+
"print(page_data)"
|
348 |
+
]
|
349 |
+
},
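The raw scrape above shows that WebBaseLoader returns the entire page, including navigation links, application-form labels and long runs of blank lines. Below is a minimal sketch of trimming that noise before handing the text to the LLM; the helper name load_clean_page is illustrative and not part of the notebook, and it assumes the same langchain_community installation used above.

import re

from langchain_community.document_loaders import WebBaseLoader

def load_clean_page(url: str) -> str:
    # Scrape the page and collapse runs of blank lines so the prompt stays small
    docs = WebBaseLoader(url).load()
    text = docs[0].page_content if docs else ""
    return re.sub(r"\n{3,}", "\n\n", text).strip()

# Same careers-page URL as the cell above; any job-posting URL works the same way
page_data = load_clean_page("https://careers.myntra.com/job-detail/?id=7431200002")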
|
350 |
+
{
|
351 |
+
"cell_type": "code",
|
352 |
+
"execution_count": 4,
|
353 |
+
"id": "f713dde3-cfcf-4efd-9832-ea90b4482418",
|
354 |
+
"metadata": {},
|
355 |
+
"outputs": [],
|
356 |
+
"source": [
|
357 |
+
"from langchain_core.prompts import PromptTemplate\n",
|
358 |
+
"# (NO PREAMBLE) means dont give that initial text like Here is your response.\n",
|
359 |
+
"prompt_extract = PromptTemplate.from_template(\n",
|
360 |
+
" \"\"\"\n",
|
361 |
+
" ### SCRAPED TEXT FROM WEBSITE:\n",
|
362 |
+
" {page_data}\n",
|
363 |
+
" ### INSTRUCTION:\n",
|
364 |
+
" The scraped text is from the career's page of a website.\n",
|
365 |
+
" Your job is to extract the job postings and return them in JSON format containing the \n",
|
366 |
+
" following keys: `role`, `experience`, `skills` and `description`.\n",
|
367 |
+
" Only return the valid JSON.\n",
|
368 |
+
" ### VALID JSON (NO PREAMBLE): \n",
|
369 |
+
" \"\"\"\n",
|
370 |
+
")"
|
371 |
+
]
|
372 |
+
},
|
373 |
+
{
|
374 |
+
"cell_type": "code",
|
375 |
+
"execution_count": 5,
|
376 |
+
"id": "c437186e-8c68-4636-8c03-048dd73c5f8a",
|
377 |
+
"metadata": {},
|
378 |
+
"outputs": [
|
379 |
+
{
|
380 |
+
"name": "stdout",
|
381 |
+
"output_type": "stream",
|
382 |
+
"text": [
|
383 |
+
"[\n",
|
384 |
+
" {\n",
|
385 |
+
" \"role\": \"Data Scientist\",\n",
|
386 |
+
" \"experience\": \"1+ years of relevant industry experience with a Bachelor’s degree or Master’s/PhD in Computer Science, Mathematics, Statistics/related fields\",\n",
|
387 |
+
" \"skills\": [\n",
|
388 |
+
" \"Python or one other high-level programming language\",\n",
|
389 |
+
" \"Theoretical understanding of statistical models such as regression, clustering and ML algorithms such as decision trees, neural networks, etc.\",\n",
|
390 |
+
" \"Machine learning frameworks like TensorFlow, PyTorch, or scikit-learn\",\n",
|
391 |
+
" \"SQL and/or NoSQL databases\"\n",
|
392 |
+
" ],\n",
|
393 |
+
" \"description\": \"Design, develop and deploy machine learning models, algorithms and systems to solve complex business problems for Myntra Recsys, Search, Vision, SCM, Pricing, Forecasting, Trend and Virality prediction, Gen AI and other areas. Theoretical understanding and practise of machine learning and expertise in one or more of the topics, such as, NLP, Computer Vision, recommender systems and Optimisation.\"\n",
|
394 |
+
" }\n",
|
395 |
+
"]\n"
|
396 |
+
]
|
397 |
+
}
|
398 |
+
],
|
399 |
+
"source": [
|
400 |
+
"chain_extract = prompt_extract | llm # this will form a langchain chain ie you are getting a prompt and passing it to LLM \n",
|
401 |
+
"res = chain_extract.invoke(input={'page_data':page_data})\n",
|
402 |
+
"print(res.content)\n",
|
403 |
+
"\n",
|
404 |
+
"# we got the json format of the job description"
|
405 |
+
]
|
406 |
+
},
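Since the prompt, the model and the parser are all runnables, the extraction can also be composed into a single chain so the string-to-dict conversion handled in the next cells happens in one call. A minimal sketch, assuming llm is the chat model initialized earlier in the notebook; the names extract_chain and job_postings are illustrative.

from langchain_core.output_parsers import JsonOutputParser

# prompt -> llm -> parser in one runnable; invoke() returns the parsed JSON directly
extract_chain = prompt_extract | llm | JsonOutputParser()
job_postings = extract_chain.invoke({"page_data": page_data})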
|
407 |
+
{
|
408 |
+
"cell_type": "code",
|
409 |
+
"execution_count": 6,
|
410 |
+
"id": "f154051f-7366-4d19-9f9c-3dc68a0ddc91",
|
411 |
+
"metadata": {},
|
412 |
+
"outputs": [
|
413 |
+
{
|
414 |
+
"data": {
|
415 |
+
"text/plain": [
|
416 |
+
"str"
|
417 |
+
]
|
418 |
+
},
|
419 |
+
"execution_count": 6,
|
420 |
+
"metadata": {},
|
421 |
+
"output_type": "execute_result"
|
422 |
+
}
|
423 |
+
],
|
424 |
+
"source": [
|
425 |
+
"# but the type of it is string, we want json object so we will use JSON Parser\n",
|
426 |
+
"type(res.content)"
|
427 |
+
]
|
428 |
+
},
|
429 |
+
{
|
430 |
+
"cell_type": "code",
|
431 |
+
"execution_count": 7,
|
432 |
+
"id": "fef8a008-bb45-4a7b-9e0e-2c508685b90f",
|
433 |
+
"metadata": {},
|
434 |
+
"outputs": [
|
435 |
+
{
|
436 |
+
"data": {
|
437 |
+
"text/plain": [
|
438 |
+
"[{'role': 'Data Scientist',\n",
|
439 |
+
" 'experience': '1+ years of relevant industry experience with a Bachelor’s degree or Master’s/PhD in Computer Science, Mathematics, Statistics/related fields',\n",
|
440 |
+
" 'skills': ['Python or one other high-level programming language',\n",
|
441 |
+
" 'Theoretical understanding of statistical models such as regression, clustering and ML algorithms such as decision trees, neural networks, etc.',\n",
|
442 |
+
" 'Machine learning frameworks like TensorFlow, PyTorch, or scikit-learn',\n",
|
443 |
+
" 'SQL and/or NoSQL databases'],\n",
|
444 |
+
" 'description': 'Design, develop and deploy machine learning models, algorithms and systems to solve complex business problems for Myntra Recsys, Search, Vision, SCM, Pricing, Forecasting, Trend and Virality prediction, Gen AI and other areas. Theoretical understanding and practise of machine learning and expertise in one or more of the topics, such as, NLP, Computer Vision, recommender systems and Optimisation.'}]"
|
445 |
+
]
|
446 |
+
},
|
447 |
+
"execution_count": 7,
|
448 |
+
"metadata": {},
|
449 |
+
"output_type": "execute_result"
|
450 |
+
}
|
451 |
+
],
|
452 |
+
"source": [
|
453 |
+
"from langchain_core.output_parsers import JsonOutputParser\n",
|
454 |
+
"\n",
|
455 |
+
"json_parser = JsonOutputParser()\n",
|
456 |
+
"json_res1 = json_parser.parse(res.content)\n",
|
457 |
+
"json_res1"
|
458 |
+
]
|
459 |
+
},
|
460 |
+
{
|
461 |
+
"cell_type": "code",
|
462 |
+
"execution_count": 9,
|
463 |
+
"id": "3e17dd09-198e-490c-877c-a4d98f254440",
|
464 |
+
"metadata": {},
|
465 |
+
"outputs": [],
|
466 |
+
"source": [
|
467 |
+
"# Check if the result is a list and extract the first dictionary\n",
|
468 |
+
"if isinstance(json_res1, list):\n",
|
469 |
+
" json_res1 = json_res1[0]"
|
470 |
+
]
|
471 |
+
},
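Because the model sometimes returns a single posting and sometimes a list of postings, the isinstance check above and the assignment in the next cell can be wrapped in one small helper so later cells always receive a dict. A sketch; the helper name first_posting is illustrative.

def first_posting(parsed):
    # Return a single job-posting dict whether the parser produced a list or a dict
    if isinstance(parsed, list):
        return parsed[0] if parsed else {}
    return parsed

job_description = first_posting(json_res1)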
|
472 |
+
{
|
473 |
+
"cell_type": "code",
|
474 |
+
"execution_count": 10,
|
475 |
+
"id": "8e6d9e8c-60f4-45f5-b9d6-50aeaed38a03",
|
476 |
+
"metadata": {},
|
477 |
+
"outputs": [],
|
478 |
+
"source": [
|
479 |
+
"job_description = json_res1"
|
480 |
+
]
|
481 |
+
},
|
482 |
+
{
|
483 |
+
"cell_type": "code",
|
484 |
+
"execution_count": 11,
|
485 |
+
"id": "31cb3f5d-5d5b-44fc-8116-9909761f57c8",
|
486 |
+
"metadata": {},
|
487 |
+
"outputs": [
|
488 |
+
{
|
489 |
+
"data": {
|
490 |
+
"text/plain": [
|
491 |
+
"{'role': 'Data Scientist',\n",
|
492 |
+
" 'experience': '1+ years of relevant industry experience with a Bachelor’s degree or Master’s/PhD in Computer Science, Mathematics, Statistics/related fields',\n",
|
493 |
+
" 'skills': ['Python or one other high-level programming language',\n",
|
494 |
+
" 'Theoretical understanding of statistical models such as regression, clustering and ML algorithms such as decision trees, neural networks, etc.',\n",
|
495 |
+
" 'Machine learning frameworks like TensorFlow, PyTorch, or scikit-learn',\n",
|
496 |
+
" 'SQL and/or NoSQL databases'],\n",
|
497 |
+
" 'description': 'Design, develop and deploy machine learning models, algorithms and systems to solve complex business problems for Myntra Recsys, Search, Vision, SCM, Pricing, Forecasting, Trend and Virality prediction, Gen AI and other areas. Theoretical understanding and practise of machine learning and expertise in one or more of the topics, such as, NLP, Computer Vision, recommender systems and Optimisation.'}"
|
498 |
+
]
|
499 |
+
},
|
500 |
+
"execution_count": 11,
|
501 |
+
"metadata": {},
|
502 |
+
"output_type": "execute_result"
|
503 |
+
}
|
504 |
+
],
|
505 |
+
"source": [
|
506 |
+
"job_description"
|
507 |
+
]
|
508 |
+
},
|
509 |
+
{
|
510 |
+
"cell_type": "code",
|
511 |
+
"execution_count": 12,
|
512 |
+
"id": "6bd7a6ef-0e46-401f-adfc-9255890eff9d",
|
513 |
+
"metadata": {},
|
514 |
+
"outputs": [],
|
515 |
+
"source": [
|
516 |
+
"import pdfplumber\n",
|
517 |
+
"import re\n",
|
518 |
+
"\n",
|
519 |
+
"def extract_text_from_pdf(pdf_path):\n",
|
520 |
+
" with pdfplumber.open(pdf_path) as pdf:\n",
|
521 |
+
" pages = [page.extract_text() for page in pdf.pages]\n",
|
522 |
+
" all_text = \"\\n\".join(pages) if pages else \"\"\n",
|
523 |
+
" # print(all_text)\n",
|
524 |
+
" return all_text"
|
525 |
+
]
|
526 |
+
},
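pdfplumber's extract_text() can return None for pages without an extractable text layer, in which case the "\n".join(pages) above would raise a TypeError. A slightly hardened sketch of the same helper that substitutes an empty string for such pages:

import pdfplumber

def extract_text_from_pdf(pdf_path: str) -> str:
    # Concatenate the text of all pages, skipping pages with no extractable text
    with pdfplumber.open(pdf_path) as pdf:
        pages = [page.extract_text() or "" for page in pdf.pages]
    return "\n".join(pages)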
|
527 |
+
{
|
528 |
+
"cell_type": "code",
|
529 |
+
"execution_count": 13,
|
530 |
+
"id": "c3338ce6-49ca-4b81-8c29-0ea7b01011be",
|
531 |
+
"metadata": {},
|
532 |
+
"outputs": [],
|
533 |
+
"source": [
|
534 |
+
"pdf_path = \"C:/Users/Admin/Downloads/Mandar_Bhalerao_IISc.pdf\"\n",
|
535 |
+
"pdf_data = extract_text_from_pdf(pdf_path)\n",
|
536 |
+
"# output_path = process_resume(pdf_path)\n",
|
537 |
+
"# print(f\"Cold email prompt saved at: {output_path}\")"
|
538 |
+
]
|
539 |
+
},
|
540 |
+
{
|
541 |
+
"cell_type": "code",
|
542 |
+
"execution_count": 41,
|
543 |
+
"id": "c2c74943-397e-46fd-a84b-18e7e9f4c96c",
|
544 |
+
"metadata": {},
|
545 |
+
"outputs": [
|
546 |
+
{
|
547 |
+
"data": {
|
548 |
+
"text/plain": [
|
549 |
+
"'Mandar Bhalerao\\n(cid:131) +91-8788519675 # [email protected] # [email protected] (cid:239) linkedin ˆ HackerRank\\nEducation\\nIndian Institute of Science Aug 2023 – Jul 2025\\nMaster of Technology - Computer Science and Automation CGPA : 7.30/10.0 Bangalore, Karnataka\\nP.E.S. Modern College of Engineering Aug 2019 – Jul 2023\\nBachelor of Engineering - Computer Engineering CGPA : 9.34/10.0 Pune, Maharashtra\\nExperience\\nNeuroPixel.AI — Deep Learning Research Intern May 2024 – July 2024\\n• Worked on optimization of Stable Diffusion models to improve performance, achieving significant efficiency gains.\\n• Implemented the Hyper-SD framework to enhance image synthesis efficiency by Knowledge Distillation techniques.\\n• Combined the advantages of Trajectory Preserving and Reformulation Distillation techniques for faster inference.\\n• Trained a Control Net for SDXL, resulting in a 30% improvement of the inference steps from the base SDXL model.\\nWestern Union — Software Intern Jan 2023 – Jun 2023\\n• Engaged with Quantum Metric to enrich powerful UX analysis, streamlining user experience design and functionality.\\n• Analyzed global transaction data, identified bugs, and implementing solutions that boosted conversion rates by 10%.\\nAmazon Web Services — Intern Oct 2021 – Dec 2021\\n• Acquired foundational skills in AWS, utilizing essential tools and services to support scalable cloud architectures.\\n• Completed the Solutions Architect Project and gained insights about the need of Cloud and AWS in today’s world.\\nProjects\\nGurgaon Real Estate Price Prediction | Source Code | Watch Live | Machine Learning, AWS Aug 2024\\n• Conducted data preprocessing, feature engineering, and performed EDA to optimize model performance.\\n• Experimented different models including Linear Regression, Decision Tree, Random Forest, XGBoost etc.\\n• AchievedabestR² scoreof 0.90andaMean Absolute Error (MAE)of 44 lakhswiththeRandomForestmodel.\\n• Created a Geo-map for sectors in Gurgaon with color-coded pricing making it easy for the user to select property.\\n• Developed a dual-layer recommendation system to boost user engagement by suggesting top 5 properties to the user.\\n• Deployed the modules using Streamlit and AWS, enabling real-time access and interactive analytics for end-users.\\nOptimizing Performance of Dilated Convolution | Source Code |C++, CUDA Nov 2023\\n• Implemented different optimization methods to reduce the overall time required for Dialated Convolution.\\n• Optimized it using single threading and achieved a maximum improvement of 85.77%.\\n• Achieved a maximum improvement of 96% through multi-threading by changing the number of threads.\\n• Implemented it for a GPU using CUDA resulting in the speedup of 600.47 and improvement of 99.83%.\\nMovie Recommendation System | Source Code | Watch Live | Python Nov 2023\\n• Created an end to end Machine Learning project using Streamlit framework in Python and movies dataset from Kaggle.\\n• Developed a Content based Recommendation System using cosine similarity to analyze similarities among 5000 movies.\\n• SuccessfullydeployedtheapplicationonStreamlitCommunityCloud, enablingreal-timeuserinteractionsandfeedback.\\nCoursework\\n• Probability and • Systems for Machine • Machine Learning • Linear Algebra and\\nStatistics Learning Optimization\\nTechnical Skills\\nLanguages and Developer Tools: Python, C++, VS Code, Jupyter Notebook, Google Colab\\nTechnical: Neural Networks, Machine Learning, Deep Learning, Gen AI, Natural Language Processing 
(NLP)\\nAchievements and Positions of Responsibility\\n• Teaching Assistant for “UENG-101 Algorithms and Programming” by Prof. Y.Narahari and Prof. Viraj Kumar\\n• Secured First Position in Chase The Py By CODEFIESTA 2022\\n• Achieved a Global Rank of 157 in February Long Challenge at Codechef\\n• Earned Gold badges in Python, C++ and Problem Solving Domain on HackerRank.'"
|
550 |
+
]
|
551 |
+
},
|
552 |
+
"execution_count": 41,
|
553 |
+
"metadata": {},
|
554 |
+
"output_type": "execute_result"
|
555 |
+
}
|
556 |
+
],
|
557 |
+
"source": [
|
558 |
+
"pdf_data"
|
559 |
+
]
|
560 |
+
},
|
561 |
+
{
|
562 |
+
"cell_type": "code",
|
563 |
+
"execution_count": 77,
|
564 |
+
"id": "ee125b52-9dad-4f18-8005-0794e25c9df0",
|
565 |
+
"metadata": {},
|
566 |
+
"outputs": [],
|
567 |
+
"source": [
|
568 |
+
"from langchain_core.prompts import PromptTemplate\n",
|
569 |
+
"# (NO PREAMBLE) means dont give that initial text like Here is your response.\n",
|
570 |
+
"prompt_extract = PromptTemplate.from_template(\n",
|
571 |
+
" \"\"\"\n",
|
572 |
+
" ### PDF DATA OBTAINED FROM RESUME:\n",
|
573 |
+
" {pdf_data}\n",
|
574 |
+
" ### INSTRUCTION:\n",
|
575 |
+
" The data is from the resume of a person.\n",
|
576 |
+
" Your job is to extract all the details of this person and summarize it in 200 words, which includes name, education, experience, projects, skills, and achievements.\n",
|
577 |
+
" ### (NO PREAMBLE): \n",
|
578 |
+
" \"\"\"\n",
|
579 |
+
")"
|
580 |
+
]
|
581 |
+
},
|
582 |
+
{
|
583 |
+
"cell_type": "code",
|
584 |
+
"execution_count": 78,
|
585 |
+
"id": "c56490a3-883b-41b7-b5ea-658ccb09d015",
|
586 |
+
"metadata": {},
|
587 |
+
"outputs": [],
|
588 |
+
"source": [
|
589 |
+
"chain_extract = prompt_extract | llm # this will form a langchain chain ie you are getting a prompt and passing it to LLM \n",
|
590 |
+
"res2 = chain_extract.invoke(input={'pdf_data':pdf_data})\n",
|
591 |
+
"# print(res.content)\n",
|
592 |
+
"summary = res2.content"
|
593 |
+
]
|
594 |
+
},
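The same runnable composition works for the summary step: piping the prompt into the model and a string parser returns the summary text directly, instead of reading res2.content by hand. A minimal sketch, assuming llm is the chat model initialized earlier in the notebook.

from langchain_core.output_parsers import StrOutputParser

# prompt -> llm -> string parser; invoke() returns the summary text
summary_chain = prompt_extract | llm | StrOutputParser()
summary = summary_chain.invoke({"pdf_data": pdf_data})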
|
595 |
+
{
|
596 |
+
"cell_type": "code",
|
597 |
+
"execution_count": 79,
|
598 |
+
"id": "f2d33a37-0978-4fc0-a681-70600ff0bb0b",
|
599 |
+
"metadata": {},
|
600 |
+
"outputs": [
|
601 |
+
{
|
602 |
+
"name": "stdout",
|
603 |
+
"output_type": "stream",
|
604 |
+
"text": [
|
605 |
+
"Mandar Bhalerao is a highly skilled individual with a strong background in computer science and automation. He is currently pursuing a Master of Technology in Computer Science and Automation at the Indian Institute of Science, with a CGPA of 7.30/10.0. He holds a Bachelor of Engineering in Computer Engineering from P.E.S. Modern College of Engineering, with a CGPA of 9.34/10.0.\n",
|
606 |
+
"\n",
|
607 |
+
"Mandar has gained valuable experience through internships at NeuroPixel.AI, Western Union, and Amazon Web Services. He worked on optimizing Stable Diffusion models, improving user experience design, and analyzing global transaction data. He also completed the Solutions Architect Project and gained insights into cloud architecture.\n",
|
608 |
+
"\n",
|
609 |
+
"Mandar has worked on several projects, including a real estate price prediction model, optimizing performance of dilated convolution, and a movie recommendation system. He has demonstrated expertise in machine learning, deep learning, and natural language processing.\n",
|
610 |
+
"\n",
|
611 |
+
"Mandar's technical skills include proficiency in Python, C++, and developer tools like VS Code and Jupyter Notebook. He has achieved several accolades, including securing the first position in Chase The Py By CODEFIESTA 2022, earning gold badges on HackerRank, and achieving a global rank of 157 in the February Long Challenge at Codechef.\n"
|
612 |
+
]
|
613 |
+
}
|
614 |
+
],
|
615 |
+
"source": [
|
616 |
+
"print(summary)\n"
|
617 |
+
]
|
618 |
+
},
|
619 |
+
{
|
620 |
+
"cell_type": "code",
|
621 |
+
"execution_count": 16,
|
622 |
+
"id": "aef76122-3d14-4d14-ae43-c2c7ff43212e",
|
623 |
+
"metadata": {},
|
624 |
+
"outputs": [],
|
625 |
+
"source": [
|
626 |
+
"from langchain_core.prompts import PromptTemplate\n",
|
627 |
+
"# (NO PREAMBLE) means dont give that initial text like Here is your response.\n",
|
628 |
+
"prompt_extract = PromptTemplate.from_template(\n",
|
629 |
+
" \"\"\"\n",
|
630 |
+
" ### PDF DATA OBTAINED FROM RESUME:\n",
|
631 |
+
" {pdf_data}\n",
|
632 |
+
" ### INSTRUCTION:\n",
|
633 |
+
" The data is from the resume of a person.\n",
|
634 |
+
" Your job is to extract all the details of this person and return them in JSON format containing the \n",
|
635 |
+
" following keys: `name`, `education`, `experience`, `projects`,`skills`, and `achievements`.\n",
|
636 |
+
" Only return the valid JSON.\n",
|
637 |
+
" ### VALID JSON (NO PREAMBLE): \n",
|
638 |
+
" \"\"\"\n",
|
639 |
+
")"
|
640 |
+
]
|
641 |
+
},
|
642 |
+
{
|
643 |
+
"cell_type": "code",
|
644 |
+
"execution_count": 17,
|
645 |
+
"id": "2650b9bb-e304-4016-a8a5-b4f06a94d738",
|
646 |
+
"metadata": {},
|
647 |
+
"outputs": [],
|
648 |
+
"source": [
|
649 |
+
"chain_extract = prompt_extract | llm # this will form a langchain chain ie you are getting a prompt and passing it to LLM \n",
|
650 |
+
"res = chain_extract.invoke(input={'pdf_data':pdf_data})\n",
|
651 |
+
"# print(res.content)\n",
|
652 |
+
"\n",
|
653 |
+
"# we got the json format of the job description"
|
654 |
+
]
|
655 |
+
},
|
656 |
+
{
|
657 |
+
"cell_type": "code",
|
658 |
+
"execution_count": 18,
|
659 |
+
"id": "ee305e3c-8fbe-48e4-864f-b7c304badf6f",
|
660 |
+
"metadata": {},
|
661 |
+
"outputs": [
|
662 |
+
{
|
663 |
+
"data": {
|
664 |
+
"text/plain": [
|
665 |
+
"str"
|
666 |
+
]
|
667 |
+
},
|
668 |
+
"execution_count": 18,
|
669 |
+
"metadata": {},
|
670 |
+
"output_type": "execute_result"
|
671 |
+
}
|
672 |
+
],
|
673 |
+
"source": [
|
674 |
+
"type(res.content)"
|
675 |
+
]
|
676 |
+
},
|
677 |
+
{
|
678 |
+
"cell_type": "code",
|
679 |
+
"execution_count": 20,
|
680 |
+
"id": "00ce2429-980b-421c-8477-850bca42540e",
|
681 |
+
"metadata": {},
|
682 |
+
"outputs": [],
|
683 |
+
"source": [
|
684 |
+
"# res.content"
|
685 |
+
]
|
686 |
+
},
|
687 |
+
{
|
688 |
+
"cell_type": "code",
|
689 |
+
"execution_count": 21,
|
690 |
+
"id": "2e2e2faa-9e9d-48c3-871c-e388cb74e4cf",
|
691 |
+
"metadata": {},
|
692 |
+
"outputs": [],
|
693 |
+
"source": [
|
694 |
+
"from langchain_core.output_parsers import JsonOutputParser\n",
|
695 |
+
"\n",
|
696 |
+
"json_parser = JsonOutputParser()\n",
|
697 |
+
"json_res = json_parser.parse(res.content)\n",
|
698 |
+
"# json_res"
|
699 |
+
]
|
700 |
+
},
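An alternative to free-form JSON extraction is to bind the expected schema to the parser, so the prompt carries explicit format instructions. A minimal sketch using a hypothetical Pydantic model mirroring the keys requested above; this is an optional variation, not code from the notebook, and it assumes llm is the chat model initialized earlier.

from langchain_core.output_parsers import JsonOutputParser
from langchain_core.prompts import PromptTemplate
from pydantic import BaseModel, Field

class ResumeDetails(BaseModel):
    # Hypothetical schema with the same keys the prompt above asks for
    name: str = Field(description="Candidate name")
    education: list = Field(description="Education entries")
    experience: list = Field(description="Work experience entries")
    projects: list = Field(description="Project entries")
    skills: dict = Field(description="Skill categories")
    achievements: list = Field(description="Achievements")

parser = JsonOutputParser(pydantic_object=ResumeDetails)
prompt = PromptTemplate(
    template="### RESUME TEXT:\n{pdf_data}\n### INSTRUCTION:\nExtract the candidate details.\n{format_instructions}",
    input_variables=["pdf_data"],
    partial_variables={"format_instructions": parser.get_format_instructions()},
)
resume_chain = prompt | llm | parser
structured_resume = resume_chain.invoke({"pdf_data": pdf_data})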
|
701 |
+
{
|
702 |
+
"cell_type": "code",
|
703 |
+
"execution_count": 22,
|
704 |
+
"id": "60a3176d-2e55-4a8b-b38d-9dd35bce1dc8",
|
705 |
+
"metadata": {},
|
706 |
+
"outputs": [
|
707 |
+
{
|
708 |
+
"data": {
|
709 |
+
"text/plain": [
|
710 |
+
"dict"
|
711 |
+
]
|
712 |
+
},
|
713 |
+
"execution_count": 22,
|
714 |
+
"metadata": {},
|
715 |
+
"output_type": "execute_result"
|
716 |
+
}
|
717 |
+
],
|
718 |
+
"source": [
|
719 |
+
"type(json_res)\n"
|
720 |
+
]
|
721 |
+
},
|
722 |
+
{
|
723 |
+
"cell_type": "code",
|
724 |
+
"execution_count": 23,
|
725 |
+
"id": "1221640b-52c6-47e1-9250-7db11636d14a",
|
726 |
+
"metadata": {},
|
727 |
+
"outputs": [
|
728 |
+
{
|
729 |
+
"data": {
|
730 |
+
"text/plain": [
|
731 |
+
"{'name': 'Mandar Bhalerao',\n",
|
732 |
+
" 'education': [{'institution': 'Indian Institute of Science',\n",
|
733 |
+
" 'degree': 'Master of Technology - Computer Science and Automation',\n",
|
734 |
+
" 'cgpa': '7.30/10.0',\n",
|
735 |
+
" 'duration': 'Aug 2023 – Jul 2025',\n",
|
736 |
+
" 'location': 'Bangalore, Karnataka'},\n",
|
737 |
+
" {'institution': 'P.E.S. Modern College of Engineering',\n",
|
738 |
+
" 'degree': 'Bachelor of Engineering - Computer Engineering',\n",
|
739 |
+
" 'cgpa': '9.34/10.0',\n",
|
740 |
+
" 'duration': 'Aug 2019 – Jul 2023',\n",
|
741 |
+
" 'location': 'Pune, Maharashtra'}],\n",
|
742 |
+
" 'experience': [{'company': 'NeuroPixel.AI',\n",
|
743 |
+
" 'position': 'Deep Learning Research Intern',\n",
|
744 |
+
" 'duration': 'May 2024 – July 2024',\n",
|
745 |
+
" 'achievements': ['Worked on optimization of Stable Diffusion models to improve performance, achieving significant efficiency gains.',\n",
|
746 |
+
" 'Implemented the Hyper-SD framework to enhance image synthesis efficiency by Knowledge Distillation techniques.',\n",
|
747 |
+
" 'Combined the advantages of Trajectory Preserving and Reformulation Distillation techniques for faster inference.',\n",
|
748 |
+
" 'Trained a Control Net for SDXL, resulting in a 30% improvement of the inference steps from the base SDXL model.']},\n",
|
749 |
+
" {'company': 'Western Union',\n",
|
750 |
+
" 'position': 'Software Intern',\n",
|
751 |
+
" 'duration': 'Jan 2023 – Jun 2023',\n",
|
752 |
+
" 'achievements': ['Engaged with Quantum Metric to enrich powerful UX analysis, streamlining user experience design and functionality.',\n",
|
753 |
+
" 'Analyzed global transaction data, identified bugs, and implementing solutions that boosted conversion rates by 10%.']},\n",
|
754 |
+
" {'company': 'Amazon Web Services',\n",
|
755 |
+
" 'position': 'Intern',\n",
|
756 |
+
" 'duration': 'Oct 2021 – Dec 2021',\n",
|
757 |
+
" 'achievements': ['Acquired foundational skills in AWS, utilizing essential tools and services to support scalable cloud architectures.',\n",
|
758 |
+
" 'Completed the Solutions Architect Project and gained insights about the need of Cloud and AWS in today’s world.']}],\n",
|
759 |
+
" 'projects': [{'name': 'Gurgaon Real Estate Price Prediction',\n",
|
760 |
+
" 'technologies': 'Machine Learning, AWS',\n",
|
761 |
+
" 'achievements': ['Conducted data preprocessing, feature engineering, and performed EDA to optimize model performance.',\n",
|
762 |
+
" 'Experimented different models including Linear Regression, Decision Tree, Random Forest, XGBoost etc.',\n",
|
763 |
+
" 'Achieved a best R² score of 0.90 and a Mean Absolute Error (MAE) of 44 lakhs with the RandomForest model.',\n",
|
764 |
+
" 'Created a Geo-map for sectors in Gurgaon with color-coded pricing making it easy for the user to select property.',\n",
|
765 |
+
" 'Developed a dual-layer recommendation system to boost user engagement by suggesting top 5 properties to the user.',\n",
|
766 |
+
" 'Deployed the modules using Streamlit and AWS, enabling real-time access and interactive analytics for end-users.']},\n",
|
767 |
+
" {'name': 'Optimizing Performance of Dilated Convolution',\n",
|
768 |
+
" 'technologies': 'C++, CUDA',\n",
|
769 |
+
" 'achievements': ['Implemented different optimization methods to reduce the overall time required for Dialated Convolution.',\n",
|
770 |
+
" 'Optimized it using single threading and achieved a maximum improvement of 85.77%.',\n",
|
771 |
+
" 'Achieved a maximum improvement of 96% through multi-threading by changing the number of threads.',\n",
|
772 |
+
" 'Implemented it for a GPU using CUDA resulting in the speedup of 600.47 and improvement of 99.83%.']},\n",
|
773 |
+
" {'name': 'Movie Recommendation System',\n",
|
774 |
+
" 'technologies': 'Python',\n",
|
775 |
+
" 'achievements': ['Created an end to end Machine Learning project using Streamlit framework in Python and movies dataset from Kaggle.',\n",
|
776 |
+
" 'Developed a Content based Recommendation System using cosine similarity to analyze similarities among 5000 movies.',\n",
|
777 |
+
" 'Successfully deployed the application on Streamlit Community Cloud, enabling real-time user interactions and feedback.']}],\n",
|
778 |
+
" 'skills': {'languages': ['Python', 'C++'],\n",
|
779 |
+
" 'developerTools': ['VS Code', 'Jupyter Notebook', 'Google Colab'],\n",
|
780 |
+
" 'technical': ['Neural Networks',\n",
|
781 |
+
" 'Machine Learning',\n",
|
782 |
+
" 'Deep Learning',\n",
|
783 |
+
" 'Gen AI',\n",
|
784 |
+
" 'Natural Language Processing (NLP)']},\n",
|
785 |
+
" 'achievements': [{'position': 'Teaching Assistant',\n",
|
786 |
+
" 'course': 'UENG-101 Algorithms and Programming',\n",
|
787 |
+
" 'professors': ['Prof. Y.Narahari', 'Prof. Viraj Kumar']},\n",
|
788 |
+
" {'position': 'First Position',\n",
|
789 |
+
" 'competition': 'Chase The Py By CODEFIESTA 2022'},\n",
|
790 |
+
" {'position': 'Global Rank of 157',\n",
|
791 |
+
" 'competition': 'February Long Challenge at Codechef'},\n",
|
792 |
+
" {'position': 'Gold badges',\n",
|
793 |
+
" 'domains': ['Python', 'C++', 'Problem Solving Domain'],\n",
|
794 |
+
" 'platform': 'HackerRank'}]}"
|
795 |
+
]
|
796 |
+
},
|
797 |
+
"execution_count": 23,
|
798 |
+
"metadata": {},
|
799 |
+
"output_type": "execute_result"
|
800 |
+
}
|
801 |
+
],
|
802 |
+
"source": [
|
803 |
+
"json_res"
|
804 |
+
]
|
805 |
+
},
|
806 |
+
{
|
807 |
+
"cell_type": "code",
|
808 |
+
"execution_count": 24,
|
809 |
+
"id": "5535e79d-13be-47fe-a0f7-3ce9a3db314b",
|
810 |
+
"metadata": {},
|
811 |
+
"outputs": [],
|
812 |
+
"source": [
|
813 |
+
"candidate_resume_details = json_res"
|
814 |
+
]
|
815 |
+
},
|
816 |
+
{
|
817 |
+
"cell_type": "code",
|
818 |
+
"execution_count": 25,
|
819 |
+
"id": "1e0616aa-375c-4b12-bcb3-bf1aed6afc34",
|
820 |
+
"metadata": {},
|
821 |
+
"outputs": [
|
822 |
+
{
|
823 |
+
"data": {
|
824 |
+
"text/plain": [
|
825 |
+
"dict"
|
826 |
+
]
|
827 |
+
},
|
828 |
+
"execution_count": 25,
|
829 |
+
"metadata": {},
|
830 |
+
"output_type": "execute_result"
|
831 |
+
}
|
832 |
+
],
|
833 |
+
"source": [
|
834 |
+
"type(candidate_resume_details)"
|
835 |
+
]
|
836 |
+
},
|
837 |
+
{
|
838 |
+
"cell_type": "code",
|
839 |
+
"execution_count": 26,
|
840 |
+
"id": "2db48d13-0e93-403f-8f51-b0dfc9ca9098",
|
841 |
+
"metadata": {},
|
842 |
+
"outputs": [
|
843 |
+
{
|
844 |
+
"data": {
|
845 |
+
"text/plain": [
|
846 |
+
"{'name': 'Mandar Bhalerao',\n",
|
847 |
+
" 'education': [{'institution': 'Indian Institute of Science',\n",
|
848 |
+
" 'degree': 'Master of Technology - Computer Science and Automation',\n",
|
849 |
+
" 'cgpa': '7.30/10.0',\n",
|
850 |
+
" 'duration': 'Aug 2023 – Jul 2025',\n",
|
851 |
+
" 'location': 'Bangalore, Karnataka'},\n",
|
852 |
+
" {'institution': 'P.E.S. Modern College of Engineering',\n",
|
853 |
+
" 'degree': 'Bachelor of Engineering - Computer Engineering',\n",
|
854 |
+
" 'cgpa': '9.34/10.0',\n",
|
855 |
+
" 'duration': 'Aug 2019 – Jul 2023',\n",
|
856 |
+
" 'location': 'Pune, Maharashtra'}],\n",
|
857 |
+
" 'experience': [{'company': 'NeuroPixel.AI',\n",
|
858 |
+
" 'position': 'Deep Learning Research Intern',\n",
|
859 |
+
" 'duration': 'May 2024 – July 2024',\n",
|
860 |
+
" 'achievements': ['Worked on optimization of Stable Diffusion models to improve performance, achieving significant efficiency gains.',\n",
|
861 |
+
" 'Implemented the Hyper-SD framework to enhance image synthesis efficiency by Knowledge Distillation techniques.',\n",
|
862 |
+
" 'Combined the advantages of Trajectory Preserving and Reformulation Distillation techniques for faster inference.',\n",
|
863 |
+
" 'Trained a Control Net for SDXL, resulting in a 30% improvement of the inference steps from the base SDXL model.']},\n",
|
864 |
+
" {'company': 'Western Union',\n",
|
865 |
+
" 'position': 'Software Intern',\n",
|
866 |
+
" 'duration': 'Jan 2023 – Jun 2023',\n",
|
867 |
+
" 'achievements': ['Engaged with Quantum Metric to enrich powerful UX analysis, streamlining user experience design and functionality.',\n",
|
868 |
+
" 'Analyzed global transaction data, identified bugs, and implementing solutions that boosted conversion rates by 10%.']},\n",
|
869 |
+
" {'company': 'Amazon Web Services',\n",
|
870 |
+
" 'position': 'Intern',\n",
|
871 |
+
" 'duration': 'Oct 2021 – Dec 2021',\n",
|
872 |
+
" 'achievements': ['Acquired foundational skills in AWS, utilizing essential tools and services to support scalable cloud architectures.',\n",
|
873 |
+
" 'Completed the Solutions Architect Project and gained insights about the need of Cloud and AWS in today’s world.']}],\n",
|
874 |
+
" 'projects': [{'name': 'Gurgaon Real Estate Price Prediction',\n",
|
875 |
+
" 'technologies': 'Machine Learning, AWS',\n",
|
876 |
+
" 'achievements': ['Conducted data preprocessing, feature engineering, and performed EDA to optimize model performance.',\n",
|
877 |
+
" 'Experimented different models including Linear Regression, Decision Tree, Random Forest, XGBoost etc.',\n",
|
878 |
+
" 'Achieved a best R² score of 0.90 and a Mean Absolute Error (MAE) of 44 lakhs with the RandomForest model.',\n",
|
879 |
+
" 'Created a Geo-map for sectors in Gurgaon with color-coded pricing making it easy for the user to select property.',\n",
|
880 |
+
" 'Developed a dual-layer recommendation system to boost user engagement by suggesting top 5 properties to the user.',\n",
|
881 |
+
" 'Deployed the modules using Streamlit and AWS, enabling real-time access and interactive analytics for end-users.']},\n",
|
882 |
+
" {'name': 'Optimizing Performance of Dilated Convolution',\n",
|
883 |
+
" 'technologies': 'C++, CUDA',\n",
|
884 |
+
" 'achievements': ['Implemented different optimization methods to reduce the overall time required for Dialated Convolution.',\n",
|
885 |
+
" 'Optimized it using single threading and achieved a maximum improvement of 85.77%.',\n",
|
886 |
+
" 'Achieved a maximum improvement of 96% through multi-threading by changing the number of threads.',\n",
|
887 |
+
" 'Implemented it for a GPU using CUDA resulting in the speedup of 600.47 and improvement of 99.83%.']},\n",
|
888 |
+
" {'name': 'Movie Recommendation System',\n",
|
889 |
+
" 'technologies': 'Python',\n",
|
890 |
+
" 'achievements': ['Created an end to end Machine Learning project using Streamlit framework in Python and movies dataset from Kaggle.',\n",
|
891 |
+
" 'Developed a Content based Recommendation System using cosine similarity to analyze similarities among 5000 movies.',\n",
|
892 |
+
" 'Successfully deployed the application on Streamlit Community Cloud, enabling real-time user interactions and feedback.']}],\n",
|
893 |
+
" 'skills': {'languages': ['Python', 'C++'],\n",
|
894 |
+
" 'developerTools': ['VS Code', 'Jupyter Notebook', 'Google Colab'],\n",
|
895 |
+
" 'technical': ['Neural Networks',\n",
|
896 |
+
" 'Machine Learning',\n",
|
897 |
+
" 'Deep Learning',\n",
|
898 |
+
" 'Gen AI',\n",
|
899 |
+
" 'Natural Language Processing (NLP)']},\n",
|
900 |
+
" 'achievements': [{'position': 'Teaching Assistant',\n",
|
901 |
+
" 'course': 'UENG-101 Algorithms and Programming',\n",
|
902 |
+
" 'professors': ['Prof. Y.Narahari', 'Prof. Viraj Kumar']},\n",
|
903 |
+
" {'position': 'First Position',\n",
|
904 |
+
" 'competition': 'Chase The Py By CODEFIESTA 2022'},\n",
|
905 |
+
" {'position': 'Global Rank of 157',\n",
|
906 |
+
" 'competition': 'February Long Challenge at Codechef'},\n",
|
907 |
+
" {'position': 'Gold badges',\n",
|
908 |
+
" 'domains': ['Python', 'C++', 'Problem Solving Domain'],\n",
|
909 |
+
" 'platform': 'HackerRank'}]}"
|
910 |
+
]
|
911 |
+
},
|
912 |
+
"execution_count": 26,
|
913 |
+
"metadata": {},
|
914 |
+
"output_type": "execute_result"
|
915 |
+
}
|
916 |
+
],
|
917 |
+
"source": [
|
918 |
+
"candidate_resume_details"
|
919 |
+
]
|
920 |
+
},
|
921 |
+
{
|
922 |
+
"cell_type": "code",
|
923 |
+
"execution_count": 27,
|
924 |
+
"id": "44a367c8-99b0-4573-8b5e-a72ae180e063",
|
925 |
+
"metadata": {},
|
926 |
+
"outputs": [
|
927 |
+
{
|
928 |
+
"data": {
|
929 |
+
"text/plain": [
|
930 |
+
"{'role': 'Data Scientist',\n",
|
931 |
+
" 'experience': '1+ years of relevant industry experience with a Bachelor’s degree or Master’s/PhD in Computer Science, Mathematics, Statistics/related fields',\n",
|
932 |
+
" 'skills': ['Python or one other high-level programming language',\n",
|
933 |
+
" 'Theoretical understanding of statistical models such as regression, clustering and ML algorithms such as decision trees, neural networks, etc.',\n",
|
934 |
+
" 'Machine learning frameworks like TensorFlow, PyTorch, or scikit-learn',\n",
|
935 |
+
" 'SQL and/or NoSQL databases'],\n",
|
936 |
+
" 'description': 'Design, develop and deploy machine learning models, algorithms and systems to solve complex business problems for Myntra Recsys, Search, Vision, SCM, Pricing, Forecasting, Trend and Virality prediction, Gen AI and other areas. Theoretical understanding and practise of machine learning and expertise in one or more of the topics, such as, NLP, Computer Vision, recommender systems and Optimisation.'}"
|
937 |
+
]
|
938 |
+
},
|
939 |
+
"execution_count": 27,
|
940 |
+
"metadata": {},
|
941 |
+
"output_type": "execute_result"
|
942 |
+
}
|
943 |
+
],
|
944 |
+
"source": [
|
945 |
+
"job_description"
|
946 |
+
]
|
947 |
+
},
|
948 |
+
{
|
949 |
+
"cell_type": "code",
|
950 |
+
"execution_count": 28,
|
951 |
+
"id": "644119b0-cbc0-488c-ba01-d7ab5026862e",
|
952 |
+
"metadata": {},
|
953 |
+
"outputs": [],
|
954 |
+
"source": [
|
955 |
+
"def generate_cold_email(details):\n",
|
956 |
+
" # Extract name\n",
|
957 |
+
" name = details.get('name', 'Candidate')\n",
|
958 |
+
"\n",
|
959 |
+
" # Extract education details\n",
|
960 |
+
" education_list = details.get('education', [])\n",
|
961 |
+
" if education_list:\n",
|
962 |
+
" education_details = ', '.join([f\"{edu.get('degree', 'Unknown degree')} from {edu.get('institution', 'Unknown institution')} ({edu.get('duration', 'Unknown duration')})\" for edu in education_list])\n",
|
963 |
+
" else:\n",
|
964 |
+
" education_details = 'No education details provided'\n",
|
965 |
+
"\n",
|
966 |
+
" # Extract skills details\n",
|
967 |
+
" skills_list = details.get('skills', [])\n",
|
968 |
+
" if skills_list:\n",
|
969 |
+
" skills_details = ', '.join([', '.join(skill.get('tools', [])) for skill in skills_list])\n",
|
970 |
+
" else:\n",
|
971 |
+
" skills_details = 'No skills listed'\n",
|
972 |
+
"\n",
|
973 |
+
" # Extract experience details\n",
|
974 |
+
" experience_list = details.get('experience', [])\n",
|
975 |
+
" if experience_list:\n",
|
976 |
+
" experience_details = []\n",
|
977 |
+
" for exp in experience_list:\n",
|
978 |
+
" position = exp.get('position', 'Unknown position')\n",
|
979 |
+
" company = exp.get('company', 'Unknown company')\n",
|
980 |
+
" duration = exp.get('duration', 'Unknown duration')\n",
|
981 |
+
" achievements = exp.get('achievements', [])\n",
|
982 |
+
" achievements_details = ', '.join(achievements) if achievements else 'No achievements mentioned'\n",
|
983 |
+
" experience_details.append(f\"{position} at {company} ({duration}): {achievements_details}\")\n",
|
984 |
+
" experience_details = ', '.join(experience_details)\n",
|
985 |
+
" else:\n",
|
986 |
+
" experience_details = 'No experience provided'\n",
|
987 |
+
"\n",
|
988 |
+
" # Extract project details\n",
|
989 |
+
" project_list = details.get('projects', [])\n",
|
990 |
+
" if project_list:\n",
|
991 |
+
" project_details = []\n",
|
992 |
+
" for project in project_list:\n",
|
993 |
+
" project_name = project.get('name', 'Unknown project')\n",
|
994 |
+
" project_description = ', '.join(project.get('description', [])) if project.get('description') else 'No details provided'\n",
|
995 |
+
" project_details.append(f\"{project_name}: {project_description}\")\n",
|
996 |
+
" project_details = ', '.join(project_details)\n",
|
997 |
+
" else:\n",
|
998 |
+
" project_details = 'No projects listed'\n",
|
999 |
+
"\n",
|
1000 |
+
" # Extract achievements details\n",
|
1001 |
+
" achievement_list = details.get('achievements', [])\n",
|
1002 |
+
" if achievement_list:\n",
|
1003 |
+
" achievement_details = ', '.join([f\"{achieve.get('position', 'Unknown position')} - {achieve.get('description', 'Unknown achievement')}\" for achieve in achievement_list])\n",
|
1004 |
+
" else:\n",
|
1005 |
+
" achievement_details = 'No achievements listed'\n",
|
1006 |
+
"\n",
|
1007 |
+
" email_prompt = f\"\"\"\n",
|
1008 |
+
" You are {name}, educated at {education_details}. Your skills include {skills_details}.\n",
|
1009 |
+
"\n",
|
1010 |
+
" You have experience as {experience_details}.\n",
|
1011 |
+
" \n",
|
1012 |
+
" Some of your key projects include {project_details}.\n",
|
1013 |
+
" \n",
|
1014 |
+
" Additionally, your achievements include {achievement_details}.\n",
|
1015 |
+
" \n",
|
1016 |
+
" Write a cold email to a potential employer or client, showcasing your skills, education, experience (including responsibilities and achievements), projects (with descriptions), and achievements. Explain how your background makes you an ideal candidate for their needs.\n",
|
1017 |
+
"\n",
|
1018 |
+
" Remember, you are {name}, ready to make a significant impact in your new role.\n",
|
1019 |
+
" \"\"\"\n",
|
1020 |
+
" return email_prompt\n"
|
1021 |
+
]
|
1022 |
+
},
|
1023 |
+
{
|
1024 |
+
"cell_type": "code",
|
1025 |
+
"execution_count": 29,
|
1026 |
+
"id": "489fdb89-4092-4e2b-b93f-f82fef33a552",
|
1027 |
+
"metadata": {},
|
1028 |
+
"outputs": [
|
1029 |
+
{
|
1030 |
+
"ename": "AttributeError",
|
1031 |
+
"evalue": "'str' object has no attribute 'get'",
|
1032 |
+
"output_type": "error",
|
1033 |
+
"traceback": [
|
1034 |
+
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
|
1035 |
+
"\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)",
|
1036 |
+
"Cell \u001b[1;32mIn[29], line 1\u001b[0m\n\u001b[1;32m----> 1\u001b[0m \u001b[43mgenerate_cold_email\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcandidate_resume_details\u001b[49m\u001b[43m)\u001b[49m\n",
|
1037 |
+
"Cell \u001b[1;32mIn[28], line 15\u001b[0m, in \u001b[0;36mgenerate_cold_email\u001b[1;34m(details)\u001b[0m\n\u001b[0;32m 13\u001b[0m skills_list \u001b[38;5;241m=\u001b[39m details\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mskills\u001b[39m\u001b[38;5;124m'\u001b[39m, [])\n\u001b[0;32m 14\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m skills_list:\n\u001b[1;32m---> 15\u001b[0m skills_details \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin([\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(skill\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtools\u001b[39m\u001b[38;5;124m'\u001b[39m, [])) \u001b[38;5;28;01mfor\u001b[39;00m skill \u001b[38;5;129;01min\u001b[39;00m skills_list])\n\u001b[0;32m 16\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 17\u001b[0m skills_details \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mNo skills listed\u001b[39m\u001b[38;5;124m'\u001b[39m\n",
|
1038 |
+
"Cell \u001b[1;32mIn[28], line 15\u001b[0m, in \u001b[0;36m<listcomp>\u001b[1;34m(.0)\u001b[0m\n\u001b[0;32m 13\u001b[0m skills_list \u001b[38;5;241m=\u001b[39m details\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mskills\u001b[39m\u001b[38;5;124m'\u001b[39m, [])\n\u001b[0;32m 14\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m skills_list:\n\u001b[1;32m---> 15\u001b[0m skills_details \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin([\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(\u001b[43mskill\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtools\u001b[39m\u001b[38;5;124m'\u001b[39m, [])) \u001b[38;5;28;01mfor\u001b[39;00m skill \u001b[38;5;129;01min\u001b[39;00m skills_list])\n\u001b[0;32m 16\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 17\u001b[0m skills_details \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mNo skills listed\u001b[39m\u001b[38;5;124m'\u001b[39m\n",
|
1039 |
+
"\u001b[1;31mAttributeError\u001b[0m: 'str' object has no attribute 'get'"
|
1040 |
+
]
|
1041 |
+
}
|
1042 |
+
],
|
1043 |
+
"source": [
|
1044 |
+
"generate_cold_email(candidate_resume_details)"
|
1045 |
+
]
|
1046 |
+
},
|
1047 |
+
{
|
1048 |
+
"cell_type": "code",
|
1049 |
+
"execution_count": 30,
|
1050 |
+
"id": "d85bbec6-ccc6-4b68-ab9d-5d7a32a14c72",
|
1051 |
+
"metadata": {},
|
1052 |
+
"outputs": [],
|
1053 |
+
"source": [
|
1054 |
+
"def generate_cold_email(details):\n",
|
1055 |
+
" # Extract name\n",
|
1056 |
+
" name = details.get('name', 'Candidate')\n",
|
1057 |
+
"\n",
|
1058 |
+
" # Extract education details\n",
|
1059 |
+
" education_list = details.get('education', [])\n",
|
1060 |
+
" if education_list:\n",
|
1061 |
+
" education_details = ', '.join([f\"{edu.get('degree', 'Unknown degree')} from {edu.get('institution', 'Unknown institution')} ({edu.get('duration', 'Unknown duration')})\" for edu in education_list])\n",
|
1062 |
+
" else:\n",
|
1063 |
+
" education_details = 'No education details provided'\n",
|
1064 |
+
"\n",
|
1065 |
+
" # Extract skills details\n",
|
1066 |
+
" skills_list = details.get('skills', [])\n",
|
1067 |
+
" if skills_list:\n",
|
1068 |
+
" skills_details = ', '.join([\n",
|
1069 |
+
" ', '.join(skill.get('tools', [])) if isinstance(skill, dict) else str(skill) \n",
|
1070 |
+
" for skill in skills_list\n",
|
1071 |
+
" ])\n",
|
1072 |
+
" else:\n",
|
1073 |
+
" skills_details = 'No skills listed'\n",
|
1074 |
+
"\n",
|
1075 |
+
" # Extract experience details\n",
|
1076 |
+
" experience_list = details.get('experience', [])\n",
|
1077 |
+
" if experience_list:\n",
|
1078 |
+
" experience_details = []\n",
|
1079 |
+
" for exp in experience_list:\n",
|
1080 |
+
" position = exp.get('position', 'Unknown position')\n",
|
1081 |
+
" company = exp.get('company', 'Unknown company')\n",
|
1082 |
+
" duration = exp.get('duration', 'Unknown duration')\n",
|
1083 |
+
" achievements = exp.get('achievements', [])\n",
|
1084 |
+
" achievements_details = ', '.join(achievements) if achievements else 'No achievements mentioned'\n",
|
1085 |
+
" experience_details.append(f\"{position} at {company} ({duration}): {achievements_details}\")\n",
|
1086 |
+
" experience_details = ', '.join(experience_details)\n",
|
1087 |
+
" else:\n",
|
1088 |
+
" experience_details = 'No experience provided'\n",
|
1089 |
+
"\n",
|
1090 |
+
" # Extract project details\n",
|
1091 |
+
" project_list = details.get('projects', [])\n",
|
1092 |
+
" if project_list:\n",
|
1093 |
+
" project_details = []\n",
|
1094 |
+
" for project in project_list:\n",
|
1095 |
+
" project_name = project.get('name', 'Unknown project')\n",
|
1096 |
+
" project_description = ', '.join(project.get('description', [])) if project.get('description') else 'No details provided'\n",
|
1097 |
+
" project_details.append(f\"{project_name}: {project_description}\")\n",
|
1098 |
+
" project_details = ', '.join(project_details)\n",
|
1099 |
+
" else:\n",
|
1100 |
+
" project_details = 'No projects listed'\n",
|
1101 |
+
"\n",
|
1102 |
+
" # Extract achievements details\n",
|
1103 |
+
" achievement_list = details.get('achievements', [])\n",
|
1104 |
+
" if achievement_list:\n",
|
1105 |
+
" achievement_details = ', '.join([f\"{achieve.get('position', 'Unknown position')} - {achieve.get('description', 'Unknown achievement')}\" for achieve in achievement_list])\n",
|
1106 |
+
" else:\n",
|
1107 |
+
" achievement_details = 'No achievements listed'\n",
|
1108 |
+
"\n",
|
1109 |
+
" email_prompt = f\"\"\"\n",
|
1110 |
+
" You are {name}, educated at {education_details}. Your skills include {skills_details}.\n",
|
1111 |
+
" \n",
|
1112 |
+
" You have experience as {experience_details}.\n",
|
1113 |
+
" \n",
|
1114 |
+
" Some of your key projects include {project_details}.\n",
|
1115 |
+
" \n",
|
1116 |
+
" Additionally, your achievements include {achievement_details}.\n",
|
1117 |
+
" \n",
|
1118 |
+
" Write a cold email to a potential employer or client, showcasing your skills, education, experience (including responsibilities and achievements), projects (with descriptions), and achievements. Explain how your background makes you an ideal candidate for their needs.\n",
|
1119 |
+
"\n",
|
1120 |
+
" Remember, you are {name}, ready to make a significant impact in your new role.\n",
|
1121 |
+
" \"\"\"\n",
|
1122 |
+
" return email_prompt\n"
|
1123 |
+
]
|
1124 |
+
},
|
1125 |
+
{
|
1126 |
+
"cell_type": "code",
|
1127 |
+
"execution_count": 31,
|
1128 |
+
"id": "e2ebd08c-e562-41fc-bede-204c67cd693b",
|
1129 |
+
"metadata": {},
|
1130 |
+
"outputs": [
|
1131 |
+
{
|
1132 |
+
"data": {
|
1133 |
+
"text/plain": [
|
1134 |
+
"'\\n You are Mandar Bhalerao, educated at Master of Technology - Computer Science and Automation from Indian Institute of Science (Aug 2023 – Jul 2025), Bachelor of Engineering - Computer Engineering from P.E.S. Modern College of Engineering (Aug 2019 – Jul 2023). Your skills include languages, developerTools, technical.\\n \\n You have experience as Deep Learning Research Intern at NeuroPixel.AI (May 2024 – July 2024): Worked on optimization of Stable Diffusion models to improve performance, achieving significant efficiency gains., Implemented the Hyper-SD framework to enhance image synthesis efficiency by Knowledge Distillation techniques., Combined the advantages of Trajectory Preserving and Reformulation Distillation techniques for faster inference., Trained a Control Net for SDXL, resulting in a 30% improvement of the inference steps from the base SDXL model., Software Intern at Western Union (Jan 2023 – Jun 2023): Engaged with Quantum Metric to enrich powerful UX analysis, streamlining user experience design and functionality., Analyzed global transaction data, identified bugs, and implementing solutions that boosted conversion rates by 10%., Intern at Amazon Web Services (Oct 2021 – Dec 2021): Acquired foundational skills in AWS, utilizing essential tools and services to support scalable cloud architectures., Completed the Solutions Architect Project and gained insights about the need of Cloud and AWS in today’s world..\\n \\n Some of your key projects include Gurgaon Real Estate Price Prediction: No details provided, Optimizing Performance of Dilated Convolution: No details provided, Movie Recommendation System: No details provided.\\n \\n Additionally, your achievements include Teaching Assistant - Unknown achievement, First Position - Unknown achievement, Global Rank of 157 - Unknown achievement, Gold badges - Unknown achievement.\\n \\n Write a cold email to a potential employer or client, showcasing your skills, education, experience (including responsibilities and achievements), projects (with descriptions), and achievements. Explain how your background makes you an ideal candidate for their needs.\\n\\n Remember, you are Mandar Bhalerao, ready to make a significant impact in your new role.\\n '"
|
1135 |
+
]
|
1136 |
+
},
|
1137 |
+
"execution_count": 31,
|
1138 |
+
"metadata": {},
|
1139 |
+
"output_type": "execute_result"
|
1140 |
+
}
|
1141 |
+
],
|
1142 |
+
"source": [
|
1143 |
+
"generate_cold_email(candidate_resume_details)"
|
1144 |
+
]
|
1145 |
+
},
|
1146 |
+
{
|
1147 |
+
"cell_type": "code",
|
1148 |
+
"execution_count": 32,
|
1149 |
+
"id": "cb5717ea-9600-401b-bb93-83728415d605",
|
1150 |
+
"metadata": {},
|
1151 |
+
"outputs": [],
|
1152 |
+
"source": [
|
1153 |
+
"def generate_cold_email(details):\n",
|
1154 |
+
" # Extract name\n",
|
1155 |
+
" name = details.get('name', 'Candidate')\n",
|
1156 |
+
"\n",
|
1157 |
+
" # Extract education details\n",
|
1158 |
+
" education_list = details.get('education', [])\n",
|
1159 |
+
" if education_list:\n",
|
1160 |
+
" education_details = ', '.join([f\"{edu.get('degree', 'Degree not specified')} from {edu.get('institution', 'Institution not specified')} ({edu.get('duration', 'Duration not specified')})\" for edu in education_list])\n",
|
1161 |
+
" else:\n",
|
1162 |
+
" education_details = 'No education details provided'\n",
|
1163 |
+
"\n",
|
1164 |
+
" # Extract skills details\n",
|
1165 |
+
" skills = details.get('skills', {})\n",
|
1166 |
+
" skills_details = []\n",
|
1167 |
+
" if isinstance(skills, dict):\n",
|
1168 |
+
" for category, tools in skills.items():\n",
|
1169 |
+
" if isinstance(tools, list):\n",
|
1170 |
+
" skills_details.append(f\"{category.capitalize()}: {', '.join(tools)}\")\n",
|
1171 |
+
" else:\n",
|
1172 |
+
" skills_details.append(f\"{category.capitalize()}: {tools}\")\n",
|
1173 |
+
" elif isinstance(skills, list):\n",
|
1174 |
+
" skills_details.append(', '.join(skills))\n",
|
1175 |
+
" else:\n",
|
1176 |
+
" skills_details = 'No skills listed'\n",
|
1177 |
+
" \n",
|
1178 |
+
" skills_details = '; '.join(skills_details) if skills_details else 'No skills listed'\n",
|
1179 |
+
"\n",
|
1180 |
+
" # Extract experience details\n",
|
1181 |
+
" experience_list = details.get('experience', [])\n",
|
1182 |
+
" if experience_list:\n",
|
1183 |
+
" experience_details = []\n",
|
1184 |
+
" for exp in experience_list:\n",
|
1185 |
+
" position = exp.get('position', 'Position not specified')\n",
|
1186 |
+
" company = exp.get('company', 'Company not specified')\n",
|
1187 |
+
" duration = exp.get('duration', 'Duration not specified')\n",
|
1188 |
+
" achievements = exp.get('achievements', [])\n",
|
1189 |
+
" if isinstance(achievements, list):\n",
|
1190 |
+
" achievements_details = ', '.join(achievements) if achievements else 'No achievements mentioned'\n",
|
1191 |
+
" else:\n",
|
1192 |
+
" achievements_details = achievements\n",
|
1193 |
+
" experience_details.append(f\"{position} at {company} ({duration}): {achievements_details}\")\n",
|
1194 |
+
" experience_details = '; '.join(experience_details)\n",
|
1195 |
+
" else:\n",
|
1196 |
+
" experience_details = 'No experience provided'\n",
|
1197 |
+
"\n",
|
1198 |
+
" # Extract project details\n",
|
1199 |
+
" project_list = details.get('projects', [])\n",
|
1200 |
+
" if project_list:\n",
|
1201 |
+
" project_details = []\n",
|
1202 |
+
" for project in project_list:\n",
|
1203 |
+
" project_name = project.get('name', 'Project name not specified')\n",
|
1204 |
+
" technologies = project.get('technologies', 'Technologies not specified')\n",
|
1205 |
+
" achievements = project.get('achievements', [])\n",
|
1206 |
+
" if isinstance(achievements, list):\n",
|
1207 |
+
" project_achievements = ', '.join(achievements) if achievements else 'No details provided'\n",
|
1208 |
+
" else:\n",
|
1209 |
+
" project_achievements = achievements\n",
|
1210 |
+
" project_details.append(f\"{project_name} (Technologies: {technologies}): {project_achievements}\")\n",
|
1211 |
+
" project_details = '; '.join(project_details)\n",
|
1212 |
+
" else:\n",
|
1213 |
+
" project_details = 'No projects listed'\n",
|
1214 |
+
"\n",
|
1215 |
+
" # Extract achievements details\n",
|
1216 |
+
" achievement_list = details.get('achievements', [])\n",
|
1217 |
+
" if achievement_list:\n",
|
1218 |
+
" achievement_details = []\n",
|
1219 |
+
" for achieve in achievement_list:\n",
|
1220 |
+
" position = achieve.get('position', 'Position not specified')\n",
|
1221 |
+
" achievement_desc = ', '.join([f\"{key.capitalize()}: {value}\" for key, value in achieve.items() if key != 'position'])\n",
|
1222 |
+
" achievement_details.append(f\"{position} - {achievement_desc}\")\n",
|
1223 |
+
" achievement_details = '; '.join(achievement_details)\n",
|
1224 |
+
" else:\n",
|
1225 |
+
" achievement_details = 'No achievements listed'\n",
|
1226 |
+
"\n",
|
1227 |
+
" email_prompt = f\"\"\"\n",
|
1228 |
+
" You are {name}, educated at {education_details}. Your skills include {skills_details}.\n",
|
1229 |
+
" \n",
|
1230 |
+
" You have experience as {experience_details}.\n",
|
1231 |
+
" \n",
|
1232 |
+
" Some of your key projects include {project_details}.\n",
|
1233 |
+
" \n",
|
1234 |
+
" Additionally, your achievements include {achievement_details}.\n",
|
1235 |
+
" \n",
|
1236 |
+
" Write a cold email to a potential employer or client, showcasing your skills, education, experience (including responsibilities and achievements), projects (with descriptions), and achievements. Explain how your background makes you an ideal candidate for their needs.\n",
|
1237 |
+
"\n",
|
1238 |
+
" Remember, you are {name}, ready to make a significant impact in your new role.\n",
|
1239 |
+
" \"\"\"\n",
|
1240 |
+
" return email_prompt"
|
1241 |
+
]
|
1242 |
+
},
|
1243 |
+
{
|
1244 |
+
"cell_type": "code",
|
1245 |
+
"execution_count": 33,
|
1246 |
+
"id": "4c0342dc-6f12-4d93-a35f-ec5754ebc212",
|
1247 |
+
"metadata": {},
|
1248 |
+
"outputs": [
|
1249 |
+
{
|
1250 |
+
"data": {
|
1251 |
+
"text/plain": [
|
1252 |
+
"\"\\n You are Mandar Bhalerao, educated at Master of Technology - Computer Science and Automation from Indian Institute of Science (Aug 2023 – Jul 2025), Bachelor of Engineering - Computer Engineering from P.E.S. Modern College of Engineering (Aug 2019 – Jul 2023). Your skills include Languages: Python, C++; Developertools: VS Code, Jupyter Notebook, Google Colab; Technical: Neural Networks, Machine Learning, Deep Learning, Gen AI, Natural Language Processing (NLP).\\n \\n You have experience as Deep Learning Research Intern at NeuroPixel.AI (May 2024 – July 2024): Worked on optimization of Stable Diffusion models to improve performance, achieving significant efficiency gains., Implemented the Hyper-SD framework to enhance image synthesis efficiency by Knowledge Distillation techniques., Combined the advantages of Trajectory Preserving and Reformulation Distillation techniques for faster inference., Trained a Control Net for SDXL, resulting in a 30% improvement of the inference steps from the base SDXL model.; Software Intern at Western Union (Jan 2023 – Jun 2023): Engaged with Quantum Metric to enrich powerful UX analysis, streamlining user experience design and functionality., Analyzed global transaction data, identified bugs, and implementing solutions that boosted conversion rates by 10%.; Intern at Amazon Web Services (Oct 2021 – Dec 2021): Acquired foundational skills in AWS, utilizing essential tools and services to support scalable cloud architectures., Completed the Solutions Architect Project and gained insights about the need of Cloud and AWS in today’s world..\\n \\n Some of your key projects include Gurgaon Real Estate Price Prediction (Technologies: Machine Learning, AWS): Conducted data preprocessing, feature engineering, and performed EDA to optimize model performance., Experimented different models including Linear Regression, Decision Tree, Random Forest, XGBoost etc., Achieved a best R² score of 0.90 and a Mean Absolute Error (MAE) of 44 lakhs with the RandomForest model., Created a Geo-map for sectors in Gurgaon with color-coded pricing making it easy for the user to select property., Developed a dual-layer recommendation system to boost user engagement by suggesting top 5 properties to the user., Deployed the modules using Streamlit and AWS, enabling real-time access and interactive analytics for end-users.; Optimizing Performance of Dilated Convolution (Technologies: C++, CUDA): Implemented different optimization methods to reduce the overall time required for Dialated Convolution., Optimized it using single threading and achieved a maximum improvement of 85.77%., Achieved a maximum improvement of 96% through multi-threading by changing the number of threads., Implemented it for a GPU using CUDA resulting in the speedup of 600.47 and improvement of 99.83%.; Movie Recommendation System (Technologies: Python): Created an end to end Machine Learning project using Streamlit framework in Python and movies dataset from Kaggle., Developed a Content based Recommendation System using cosine similarity to analyze similarities among 5000 movies., Successfully deployed the application on Streamlit Community Cloud, enabling real-time user interactions and feedback..\\n \\n Additionally, your achievements include Teaching Assistant - Course: UENG-101 Algorithms and Programming, Professors: ['Prof. Y.Narahari', 'Prof. 
Viraj Kumar']; First Position - Competition: Chase The Py By CODEFIESTA 2022; Global Rank of 157 - Competition: February Long Challenge at Codechef; Gold badges - Domains: ['Python', 'C++', 'Problem Solving Domain'], Platform: HackerRank.\\n \\n Write a cold email to a potential employer or client, showcasing your skills, education, experience (including responsibilities and achievements), projects (with descriptions), and achievements. Explain how your background makes you an ideal candidate for their needs.\\n\\n Remember, you are Mandar Bhalerao, ready to make a significant impact in your new role.\\n \""
|
1253 |
+
]
|
1254 |
+
},
|
1255 |
+
"execution_count": 33,
|
1256 |
+
"metadata": {},
|
1257 |
+
"output_type": "execute_result"
|
1258 |
+
}
|
1259 |
+
],
|
1260 |
+
"source": [
|
1261 |
+
"generate_cold_email(candidate_resume_details)\n"
|
1262 |
+
]
|
1263 |
+
},
|
1264 |
+
{
|
1265 |
+
"cell_type": "code",
|
1266 |
+
"execution_count": 35,
|
1267 |
+
"id": "d95500fa-0688-4969-99e4-b181ec7a2ed2",
|
1268 |
+
"metadata": {},
|
1269 |
+
"outputs": [
|
1270 |
+
{
|
1271 |
+
"data": {
|
1272 |
+
"text/plain": [
|
1273 |
+
"str"
|
1274 |
+
]
|
1275 |
+
},
|
1276 |
+
"execution_count": 35,
|
1277 |
+
"metadata": {},
|
1278 |
+
"output_type": "execute_result"
|
1279 |
+
}
|
1280 |
+
],
|
1281 |
+
"source": [
|
1282 |
+
"resume_details = generate_cold_email(candidate_resume_details)\n",
|
1283 |
+
"type(resume_details)"
|
1284 |
+
]
|
1285 |
+
},
|
1286 |
+
{
|
1287 |
+
"cell_type": "code",
|
1288 |
+
"execution_count": 36,
|
1289 |
+
"id": "47ca8cd9-9294-4d8b-9486-fbbe718deabb",
|
1290 |
+
"metadata": {},
|
1291 |
+
"outputs": [],
|
1292 |
+
"source": [
|
1293 |
+
"# this is prompt template for writing an email\n",
|
1294 |
+
"\n",
|
1295 |
+
"prompt_email = PromptTemplate.from_template(\n",
|
1296 |
+
" \"\"\"\n",
|
1297 |
+
" ### JOB DESCRIPTION:\n",
|
1298 |
+
" {job_description}\n",
|
1299 |
+
"\n",
|
1300 |
+
" ### INSTRUCTION:\n",
|
1301 |
+
" Introduce yourself from the below details\n",
|
1302 |
+
" {resume_details}\n",
|
1303 |
+
" End the email with Name and Designation. \n",
|
1304 |
+
" Do not provide a preamble.\n",
|
1305 |
+
" ### EMAIL (NO PREAMBLE):\n",
|
1306 |
+
"\n",
|
1307 |
+
" \"\"\"\n",
|
1308 |
+
" )"
|
1309 |
+
]
|
1310 |
+
},
|
1311 |
+
{
|
1312 |
+
"cell_type": "code",
|
1313 |
+
"execution_count": 54,
|
1314 |
+
"id": "ed8d8c37-5e74-46ae-8410-2ad131e9fc77",
|
1315 |
+
"metadata": {},
|
1316 |
+
"outputs": [],
|
1317 |
+
"source": [
|
1318 |
+
"# job_description"
|
1319 |
+
]
|
1320 |
+
},
|
1321 |
+
{
|
1322 |
+
"cell_type": "code",
|
1323 |
+
"execution_count": 53,
|
1324 |
+
"id": "1b1b3c75-9ac6-4c34-937f-1a45009b7be0",
|
1325 |
+
"metadata": {},
|
1326 |
+
"outputs": [],
|
1327 |
+
"source": [
|
1328 |
+
"# candidate_resume_details"
|
1329 |
+
]
|
1330 |
+
},
|
1331 |
+
{
|
1332 |
+
"cell_type": "code",
|
1333 |
+
"execution_count": 40,
|
1334 |
+
"id": "b398c4f0-d2c2-4718-bfe1-a1a9f56d3389",
|
1335 |
+
"metadata": {},
|
1336 |
+
"outputs": [
|
1337 |
+
{
|
1338 |
+
"name": "stdout",
|
1339 |
+
"output_type": "stream",
|
1340 |
+
"text": [
|
1341 |
+
"I am a highly motivated and detail-oriented Data Scientist with a strong educational background in Computer Science and Automation. I hold a Master of Technology degree from the Indian Institute of Science and a Bachelor of Engineering degree from P.E.S. Modern College of Engineering.\n",
|
1342 |
+
"\n",
|
1343 |
+
"With over a year of industry experience, I have worked as a Deep Learning Research Intern at NeuroPixel.AI, where I optimized Stable Diffusion models to improve performance, achieving significant efficiency gains. I also worked as a Software Intern at Western Union, where I analyzed global transaction data, identified bugs, and implemented solutions that boosted conversion rates by 10%. Additionally, I have completed internships at Amazon Web Services, where I acquired foundational skills in AWS and utilized essential tools and services to support scalable cloud architectures.\n",
|
1344 |
+
"\n",
|
1345 |
+
"I have a strong technical skillset, with expertise in Python, C++, Neural Networks, Machine Learning, Deep Learning, Gen AI, and Natural Language Processing (NLP). I am proficient in using developer tools such as VS Code, Jupyter Notebook, and Google Colab.\n",
|
1346 |
+
"\n",
|
1347 |
+
"I have worked on various projects, including a Gurgaon Real Estate Price Prediction model, where I achieved a best R² score of 0.90 and a Mean Absolute Error (MAE) of 44 lakhs with the RandomForest model. I also developed a dual-layer recommendation system to boost user engagement by suggesting top 5 properties to the user. Additionally, I have worked on optimizing the performance of Dilated Convolution using C++ and CUDA, achieving a maximum improvement of 96% through multi-threading and a speedup of 600.47 and improvement of 99.83% using CUDA.\n",
|
1348 |
+
"\n",
|
1349 |
+
"I am excited to apply my skills and experience to a Data Scientist role, where I can design, develop, and deploy machine learning models, algorithms, and systems to solve complex business problems.\n",
|
1350 |
+
"\n",
|
1351 |
+
"Mandar Bhalerao\n",
|
1352 |
+
"Data Scientist\n"
|
1353 |
+
]
|
1354 |
+
}
|
1355 |
+
],
|
1356 |
+
"source": [
|
1357 |
+
"# again creating a chain of prompt_email and llm\n",
|
1358 |
+
"# invoking the chain by passing the parameter of job_description and link_list\n",
|
1359 |
+
"\n",
|
1360 |
+
"# chain_email = prompt_email | llm\n",
|
1361 |
+
"# res = chain_email.invoke({\"job_description\": str(job_description), \"resume_details\": candidate_resume_details})\n",
|
1362 |
+
"# print(res.content)"
|
1363 |
+
]
|
1364 |
+
},
|
1365 |
+
{
|
1366 |
+
"cell_type": "code",
|
1367 |
+
"execution_count": 96,
|
1368 |
+
"id": "44c12cb4-6ec5-45dc-a7df-2865d5d5eb59",
|
1369 |
+
"metadata": {},
|
1370 |
+
"outputs": [],
|
1371 |
+
"source": [
|
1372 |
+
"# this is prompt template for writing an email\n",
|
1373 |
+
"\n",
|
1374 |
+
"prompt_email = PromptTemplate.from_template(\n",
|
1375 |
+
" \"\"\"\n",
|
1376 |
+
" ### JOB DESCRIPTION:\n",
|
1377 |
+
" This is a job description\n",
|
1378 |
+
" \n",
|
1379 |
+
" {job_description}\n",
|
1380 |
+
"\n",
|
1381 |
+
" ### INSTRUCTION:\n",
|
1382 |
+
" These are the person's details.\n",
|
1383 |
+
" {summary}\n",
|
1384 |
+
" Consider yourself as this person. \n",
|
1385 |
+
" \n",
|
1386 |
+
" Introduce yourself in an engaging way from above with your name from the above details and your current designation. \n",
|
1387 |
+
" Try to find some similar things in the job description with your details. Mention those things which are similar. Mention your experience details.\n",
|
1388 |
+
" Your job is to write a cold email to the hiring manager regarding the job mentioned above describing the capability of you \n",
|
1389 |
+
" in fulfilling their needs.\n",
|
1390 |
+
" End the email with Name and Current place where your are working or studying. \n",
|
1391 |
+
" Do not provide a preamble.\n",
|
1392 |
+
" ### EMAIL (NO PREAMBLE):\n",
|
1393 |
+
"\n",
|
1394 |
+
" \"\"\"\n",
|
1395 |
+
" )"
|
1396 |
+
]
|
1397 |
+
},
|
1398 |
+
{
|
1399 |
+
"cell_type": "code",
|
1400 |
+
"execution_count": 97,
|
1401 |
+
"id": "0e6d6187-ca88-451b-8a36-3db0299cbb84",
|
1402 |
+
"metadata": {},
|
1403 |
+
"outputs": [
|
1404 |
+
{
|
1405 |
+
"name": "stdout",
|
1406 |
+
"output_type": "stream",
|
1407 |
+
"text": [
|
1408 |
+
"Subject: Application for Data Scientist Role at Myntra\n",
|
1409 |
+
"\n",
|
1410 |
+
"Dear Hiring Manager,\n",
|
1411 |
+
"\n",
|
1412 |
+
"I'm Mandar Bhalerao, a highly skilled and enthusiastic individual with a strong background in computer science and automation, currently pursuing my Master of Technology in Computer Science and Automation at the Indian Institute of Science.\n",
|
1413 |
+
"\n",
|
1414 |
+
"As I came across the Data Scientist role at Myntra, I was excited to see the alignment between the job requirements and my skills. With a solid foundation in computer science and a CGPA of 7.30/10.0 in my current program, I'm confident in my ability to design, develop, and deploy machine learning models to solve complex business problems.\n",
|
1415 |
+
"\n",
|
1416 |
+
"I noticed that the job description mentions expertise in machine learning frameworks like TensorFlow, PyTorch, or scikit-learn, which I've gained experience with through my projects, including a real estate price prediction model and optimizing performance of dilated convolution. Additionally, my proficiency in Python, a high-level programming language, aligns with the job requirements.\n",
|
1417 |
+
"\n",
|
1418 |
+
"My experience in internships at NeuroPixel.AI, Western Union, and Amazon Web Services has provided me with valuable insights into cloud architecture, user experience design, and data analysis. I've also worked on projects that involve natural language processing, recommender systems, and optimization, which are mentioned in the job description.\n",
|
1419 |
+
"\n",
|
1420 |
+
"As a skilled data scientist, I've achieved several accolades, including securing the first position in Chase The Py By CODEFIESTA 2022, earning gold badges on HackerRank, and achieving a global rank of 157 in the February Long Challenge at Codechef. I'm confident that my technical skills, combined with my experience and passion for machine learning, make me an ideal candidate for this role.\n",
|
1421 |
+
"\n",
|
1422 |
+
"I'd love the opportunity to discuss how my skills and experience align with the requirements of the Data Scientist role at Myntra. Please find my resume attached for your reference.\n",
|
1423 |
+
"\n",
|
1424 |
+
"Thank you for considering my application.\n",
|
1425 |
+
"\n",
|
1426 |
+
"Best regards,\n",
|
1427 |
+
"\n",
|
1428 |
+
"Mandar Bhalerao\n",
|
1429 |
+
"Indian Institute of Science, Bangalore\n"
|
1430 |
+
]
|
1431 |
+
}
|
1432 |
+
],
|
1433 |
+
"source": [
|
1434 |
+
"# again creating a chain of prompt_email and llm\n",
|
1435 |
+
"# invoking the chain by passing the parameter of job_description and link_list\n",
|
1436 |
+
"\n",
|
1437 |
+
"chain_email = prompt_email | llm\n",
|
1438 |
+
"res5 = chain_email.invoke({\"job_description\": str(job_description), \"summary\": summary})\n",
|
1439 |
+
"print(res5.content)"
|
1440 |
+
]
|
1441 |
+
},
|
1442 |
+
{
|
1443 |
+
"cell_type": "code",
|
1444 |
+
"execution_count": null,
|
1445 |
+
"id": "a9f23c90-47da-43ee-8017-3dd4c4082cc9",
|
1446 |
+
"metadata": {},
|
1447 |
+
"outputs": [],
|
1448 |
+
"source": []
|
1449 |
+
}
|
1450 |
+
],
|
1451 |
+
"metadata": {
|
1452 |
+
"kernelspec": {
|
1453 |
+
"display_name": "Python 3 (ipykernel)",
|
1454 |
+
"language": "python",
|
1455 |
+
"name": "python3"
|
1456 |
+
},
|
1457 |
+
"language_info": {
|
1458 |
+
"codemirror_mode": {
|
1459 |
+
"name": "ipython",
|
1460 |
+
"version": 3
|
1461 |
+
},
|
1462 |
+
"file_extension": ".py",
|
1463 |
+
"mimetype": "text/x-python",
|
1464 |
+
"name": "python",
|
1465 |
+
"nbconvert_exporter": "python",
|
1466 |
+
"pygments_lexer": "ipython3",
|
1467 |
+
"version": "3.10.7"
|
1468 |
+
}
|
1469 |
+
},
|
1470 |
+
"nbformat": 4,
|
1471 |
+
"nbformat_minor": 5
|
1472 |
+
}
|
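The notebook above assembles the cold email in two stages: generate_cold_email flattens the parsed resume into a single prompt string, and a PromptTemplate piped into the Groq LLM combines that summary with a job description to produce the final email. The following is a minimal standalone sketch of that chain; the environment-variable key handling and the placeholder job_description/summary values are assumptions for illustration, not taken from the notebook.

import os
from langchain_core.prompts import PromptTemplate
from langchain_groq import ChatGroq

# Build the LLM client; the model name mirrors the tutorial cell, and the key is
# assumed to be supplied through the GROQ_API_KEY environment variable.
llm = ChatGroq(
    temperature=0,
    groq_api_key=os.environ["GROQ_API_KEY"],
    model_name="llama-3.1-70b-versatile",
)

# Condensed version of the email template defined in the cells above.
prompt_email = PromptTemplate.from_template(
    """
    ### JOB DESCRIPTION:
    {job_description}

    ### INSTRUCTION:
    These are the person's details.
    {summary}
    Consider yourself as this person.
    Write a cold email to the hiring manager describing how you can fulfil their needs.
    End the email with your name and the place where you are currently working or studying.
    Do not provide a preamble.
    ### EMAIL (NO PREAMBLE):
    """
)

# The pipe operator composes template and model into a single runnable chain.
chain_email = prompt_email | llm
res = chain_email.invoke({
    "job_description": "Data Scientist role requiring Python and ML experience",  # placeholder
    "summary": "Mandar Bhalerao, M.Tech student at IISc with ML internships",     # placeholder
})
print(res.content)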
requirements.txt
ADDED
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
langchain==0.2.14
|
2 |
+
langchain-community==0.2.12
|
3 |
+
langchain-groq==0.1.9
|
4 |
+
unstructured==0.14.6
|
5 |
+
selenium==4.21.0
|
6 |
+
chromadb==0.5.0
|
7 |
+
streamlit==1.35.0
|
8 |
+
pandas==2.0.2
|
9 |
+
python-dotenv==1.0.0
|
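All dependencies above are pinned to exact versions. A quick, hypothetical way to confirm that the active environment matches these pins (assuming requirements.txt sits in the current working directory) is to compare them against importlib.metadata:

from importlib.metadata import PackageNotFoundError, version

# Read the pinned requirements from the file added in this commit.
with open("requirements.txt") as fh:
    pins = [line.strip() for line in fh if "==" in line]

for pin in pins:
    name, _, wanted = pin.partition("==")
    wanted = wanted.lstrip("=")  # tolerate an accidental '===' pin
    try:
        installed = version(name)
        status = "OK" if installed == wanted else f"MISMATCH (installed {installed})"
    except PackageNotFoundError:
        status = "MISSING"
    print(f"{name:25s} {wanted:10s} {status}")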
tutorial_groq.ipynb
ADDED
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 1,
|
6 |
+
"id": "0f76f97c",
|
7 |
+
"metadata": {},
|
8 |
+
"outputs": [
|
9 |
+
{
|
10 |
+
"name": "stdout",
|
11 |
+
"output_type": "stream",
|
12 |
+
"text": [
|
13 |
+
"... Neil Armstrong!\n",
|
14 |
+
"\n",
|
15 |
+
"On July 20, 1969, Neil Armstrong became the first person to set foot on the Moon as part of the Apollo 11 mission. He famously declared, \"That's one small step for man, one giant leap for mankind\" as he stepped off the lunar module Eagle onto the Moon's surface.\n",
|
16 |
+
"\n",
|
17 |
+
"Would you like to know more about the Apollo 11 mission or Neil Armstrong's life?\n"
|
18 |
+
]
|
19 |
+
}
|
20 |
+
],
|
21 |
+
"source": [
|
22 |
+
"from langchain_groq import ChatGroq\n",
|
23 |
+
"\n",
|
24 |
+
"llm = ChatGroq(\n",
|
25 |
+
" temperature=0, \n",
|
26 |
+
" groq_api_key='<add your API key here.>', \n",
|
27 |
+
" model_name=\"llama-3.1-70b-versatile\"\n",
|
28 |
+
")\n",
|
29 |
+
"\n",
|
30 |
+
"response = llm.invoke(\"The first person to land on moon was ...\")\n",
|
31 |
+
"print(response.content)"
|
32 |
+
]
|
33 |
+
}
|
34 |
+
],
|
35 |
+
"metadata": {
|
36 |
+
"kernelspec": {
|
37 |
+
"display_name": "Python 3 (ipykernel)",
|
38 |
+
"language": "python",
|
39 |
+
"name": "python3"
|
40 |
+
},
|
41 |
+
"language_info": {
|
42 |
+
"codemirror_mode": {
|
43 |
+
"name": "ipython",
|
44 |
+
"version": 3
|
45 |
+
},
|
46 |
+
"file_extension": ".py",
|
47 |
+
"mimetype": "text/x-python",
|
48 |
+
"name": "python",
|
49 |
+
"nbconvert_exporter": "python",
|
50 |
+
"pygments_lexer": "ipython3",
|
51 |
+
"version": "3.10.11"
|
52 |
+
}
|
53 |
+
},
|
54 |
+
"nbformat": 4,
|
55 |
+
"nbformat_minor": 5
|
56 |
+
}
|
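The tutorial cell above hardcodes the Groq API key as a placeholder string. Since python-dotenv is already listed in requirements.txt, one alternative is to load the key from a .env file instead; the sketch below assumes a local .env containing GROQ_API_KEY and is not part of the committed notebook.

import os
from dotenv import load_dotenv
from langchain_groq import ChatGroq

load_dotenv()  # populates os.environ from a local .env file

llm = ChatGroq(
    temperature=0,
    groq_api_key=os.getenv("GROQ_API_KEY"),  # assumed to be set in .env
    model_name="llama-3.1-70b-versatile",
)

response = llm.invoke("The first person to land on moon was ...")
print(response.content)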