import os
import requests
import gradio as gr
from openai import OpenAI
import logging
# Configure logging
logging.basicConfig(level=logging.INFO)
# Fetch API keys from environment variables
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
PROXYCURL_API_KEY = os.getenv("PROXYCURL_API_KEY")
FIRECRAWL_API_KEY = os.getenv("FIRECRAWL_API_KEY")
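# Expected environment setup (values are placeholders):
#   OPENAI_API_KEY    - despite the name, this key is sent to NVIDIA's
#                       OpenAI-compatible endpoint (integrate.api.nvidia.com) below
#   PROXYCURL_API_KEY - Proxycurl (LinkedIn profile) API key
#   FIRECRAWL_API_KEY - Firecrawl (website crawling) API key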
# Function to sanitize data by ensuring it's safe and clean for use
def sanitize_data(data, default_value=""):
    return data.strip() if isinstance(data, str) and data.strip() else default_value
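# Example behavior (follows directly from the definition above):
#   sanitize_data("  hello  ")       -> "hello"
#   sanitize_data(None, "fallback")  -> "fallback"
#   sanitize_data("   ", "fallback") -> "fallback"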
# Function to fetch LinkedIn data using the Proxycurl API
def fetch_linkedin_data(linkedin_url):
    headers = {'Authorization': f'Bearer {PROXYCURL_API_KEY}'}
    api_endpoint = 'https://nubela.co/proxycurl/api/v2/linkedin'
    logging.info("Fetching LinkedIn data...")
    try:
        response = requests.get(api_endpoint,
                                params={'url': linkedin_url},
                                headers=headers,
                                timeout=10)
        if response.status_code == 200:
            logging.info("LinkedIn data fetched successfully.")
            return response.json()
        else:
            logging.error(f"Error fetching LinkedIn data: {response.text}")
            return {"error": f"Error fetching LinkedIn data: {response.text}"}
    except Exception as e:
        logging.error(f"Exception during LinkedIn data fetch: {e}")
        return {"error": f"Exception during LinkedIn data fetch: {e}"}
# Function to fetch company information using Firecrawl API
def fetch_company_info(company_url):
    headers = {
        'Authorization': f'Bearer {FIRECRAWL_API_KEY}',
        'Content-Type': 'application/json'
    }
    api_endpoint = 'https://api.firecrawl.dev/v1/crawl'
    data = {
        "url": company_url,
        "limit": 100,
        "scrapeOptions": {
            "formats": ["markdown", "html"]
        }
    }
    logging.info("Fetching company information...")
    try:
        response = requests.post(api_endpoint, json=data, headers=headers, timeout=15)
        if response.status_code == 200:
            logging.info("Company information fetched successfully.")
            return response.json()
        else:
            logging.error(f"Error fetching company information: {response.text}")
            return {"error": f"Error fetching company information: {response.text}"}
    except Exception as e:
        logging.error(f"Exception during company info fetch: {e}")
        return {"error": f"Exception during company info fetch: {e}"}
# Function to structure the email dynamically based on inputs and fetched data
def structure_email(user_data, linkedin_info, company_info):
    # Sanitize and extract the required information
    linkedin_role = sanitize_data(linkedin_info.get('current_role', user_data['role']))
    linkedin_skills = sanitize_data(linkedin_info.get('skills', 'various relevant skills'))
    linkedin_industry = sanitize_data(linkedin_info.get('industry', 'the industry'))
    company_name = sanitize_data(user_data['company_url'] or company_info.get('company_name', 'the company'))
    company_mission = sanitize_data(company_info.get('mission', f"{company_name}'s mission"))
    company_goal = sanitize_data(company_info.get('goal', 'achieving excellence'))
    # Build the email directly with the available and sanitized information
    email_body = "Dear Hiring Manager,\n\n"
    email_body += f"I am writing to express my interest in the {sanitize_data(user_data['role'])} position at {company_name}. "
    email_body += f"I am particularly drawn to {company_name}'s mission to {company_mission}, which aligns with my passion and expertise in {linkedin_industry}. "
    email_body += f"As a {linkedin_role}, I have developed skills in {linkedin_skills}, which I believe are highly relevant to the requirements of this role.\n\n"
    email_body += f"I am confident that my background and expertise in {linkedin_skills} can contribute to achieving {company_goal}. "
    email_body += f"My experience in similar roles has prepared me to make an immediate and meaningful impact on your team, and I am excited about the opportunity to bring my expertise to {company_name}.\n\n"
    email_body += "I would appreciate the opportunity to discuss how my background and skills align with the needs of your organization. "
    email_body += "Thank you for your time and consideration. I look forward to the possibility of contributing to your team.\n\n"
    email_body += f"Best regards,\n{sanitize_data(user_data['name'])}"
    return email_body
# Function to validate the generated email for completeness and professionalism
def validate_email(email_content):
    logging.info("Validating email content...")
    logging.info(f"Email Content for Validation: {email_content}")
    # Check that the essential elements exist in the email content
    return all(keyword in email_content for keyword in ["interest", "skills", "experience", "contribute", "Best regards"])
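# Example: a draft passes only if it contains all five keywords:
#   validate_email("interest skills experience contribute Best regards")  -> True
#   validate_email("Dear Hiring Manager, ...")                            -> False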
# Function to generate email content using Nvidia Nemotron LLM (non-streaming for simplicity)
def generate_email_content(api_key, prompt):
    client = OpenAI(
        base_url="https://integrate.api.nvidia.com/v1",
        api_key=api_key
    )
    logging.info("Generating email content...")
    try:
        response = client.chat.completions.create(
            model="nvidia/llama-3.1-nemotron-70b-instruct",
            messages=[
                {"role": "user", "content": prompt}
            ],
            temperature=0.5,
            top_p=1,
            max_tokens=1024,
            stream=False
        )
        if hasattr(response, 'choices') and len(response.choices) > 0:
            email_content = response.choices[0].message.content
            logging.info("Email content generated successfully.")
            return email_content
        else:
            logging.error("Error: No choices found in the response.")
            return "Error generating email content: No valid choices."
    except Exception as e:
        logging.error(f"Error generating email content: {e}")
        return "Error generating email content."
# Custom Agent class to simulate behavior similar to OpenAI's Swarm framework
class Agent:
    def __init__(self, name, instructions, user_data):
        self.name = name
        self.instructions = instructions
        self.user_data = user_data

    def act(self):
        if self.name == "Data Collection Agent":
            linkedin_info = fetch_linkedin_data(self.user_data['linkedin_url'])
            company_info = fetch_company_info(self.user_data['company_url'])
            return linkedin_info, company_info
        elif self.name == "Email Generation Agent":
            user_data = self.user_data['user_data']
            linkedin_info = self.user_data['linkedin_info']
            company_info = self.user_data['company_info']
            email_content = structure_email(user_data, linkedin_info, company_info)
            return email_content
# Simulated Swarm class to manage agents
class Swarm:
    def __init__(self):
        self.agents = []

    def add_agent(self, agent):
        self.agents.append(agent)

    def run(self):
        for agent in self.agents:
            if agent.name == "Data Collection Agent":
                linkedin_info, company_info = agent.act()
                if "error" in linkedin_info or "error" in company_info:
                    # Return a (message, None) pair so callers can always unpack two values
                    return "Error fetching data. Please check the LinkedIn and company URLs.", None
                return linkedin_info, company_info
# Function that integrates the agents and manages iterations
def run_agent(name, email, phone, linkedin_url, company_url, role):
    user_data = {
        "name": name,
        "email": email,
        "phone": phone,
        "linkedin_url": linkedin_url,
        "company_url": company_url,
        "role": role
    }
    email_swarm = Swarm()
    data_collection_agent = Agent("Data Collection Agent", "Collect user inputs and relevant data", user_data)
    email_swarm.add_agent(data_collection_agent)
    linkedin_info, company_info = email_swarm.run()
    if isinstance(linkedin_info, str):
        return linkedin_info
    agent_data = {
        "user_data": user_data,
        "linkedin_info": linkedin_info,
        "company_info": company_info
    }
    email_agent = Agent("Email Generation Agent", "Generate the email content", agent_data)
    email_content = email_agent.act()
    # Retry up to 3 times, asking the LLM to refine the draft when validation fails
    for _ in range(3):
        if validate_email(email_content):
            return email_content
        refined_prompt = f"Refine: {structure_email(user_data, linkedin_info, company_info)}"
        email_content = generate_email_content(OPENAI_API_KEY, refined_prompt)
    return "Unable to generate a valid email after 3 attempts."
# Set up the Gradio interface
final_interface = gr.Interface(
    fn=run_agent,
    inputs=[
        gr.Textbox(label="Name"),
        gr.Textbox(label="Email"),
        gr.Textbox(label="Phone Number"),
        gr.Textbox(label="LinkedIn Profile URL"),
        gr.Textbox(label="Company URL or Name"),
        gr.Textbox(label="Role Being Applied For")
    ],
    outputs="text",
    title="Email Writing AI Agent",
    description="Autonomously generate a professional email tailored to the job application."
)
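# A direct call that bypasses the UI might look like this (argument values are
# placeholders):
#   print(run_agent("Jane Doe", "jane@example.com", "555-0100",
#                   "https://www.linkedin.com/in/janedoe/",
#                   "https://example.com", "Data Scientist"))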
if __name__ == "__main__":
    final_interface.launch()