"""Email Writing AI Agent.

Fetches LinkedIn profile data (Proxycurl) and company information (Firecrawl),
drafts a job-application email, optionally refines it with an NVIDIA-hosted LLM,
and exposes the whole flow through a Gradio interface.
"""

import logging
import os

import gradio as gr
import requests
from openai import OpenAI

logging.basicConfig(level=logging.INFO)

# API keys are read from environment variables.
# Note: OPENAI_API_KEY is passed to an OpenAI-compatible client pointed at
# NVIDIA's hosted endpoint (see generate_email_content), so it should hold the
# key for that service.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
PROXYCURL_API_KEY = os.getenv("PROXYCURL_API_KEY")
FIRECRAWL_API_KEY = os.getenv("FIRECRAWL_API_KEY")

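# Example environment setup (shell), assuming the keys are exported before launch:
#   export OPENAI_API_KEY="<key for NVIDIA's OpenAI-compatible endpoint>"
#   export PROXYCURL_API_KEY="<Proxycurl key>"
#   export FIRECRAWL_API_KEY="<Firecrawl key>"
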
def sanitize_data(data, default_value=""):
    """Return the stripped string, or default_value if data is empty or not a string."""
    return data.strip() if isinstance(data, str) and data.strip() else default_value

def fetch_linkedin_data(linkedin_url):
    """Fetch profile data for a LinkedIn URL from the Proxycurl API."""
    api_key = os.getenv("PROXYCURL_API_KEY")
    headers = {'Authorization': f'Bearer {api_key}'}
    api_endpoint = 'https://nubela.co/proxycurl/api/v2/linkedin'

    logging.info("Fetching LinkedIn data...")
    try:
        response = requests.get(api_endpoint,
                                params={'url': linkedin_url},
                                headers=headers,
                                timeout=10)
        if response.status_code == 200:
            logging.info("LinkedIn data fetched successfully.")
            return response.json()
        else:
            logging.error(f"Error fetching LinkedIn data: {response.text}")
            return {"error": f"Error fetching LinkedIn data: {response.text}"}
    except Exception as e:
        logging.error(f"Exception during LinkedIn data fetch: {e}")
        return {"error": f"Exception during LinkedIn data fetch: {e}"}

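# Simplified shape of the profile payload that structure_email() later reads; this
# is an assumption for illustration only, since real Proxycurl responses are richer
# and may use different field names:
# {
#     "current_role": "Data Scientist",
#     "skills": "Python, SQL, machine learning",
#     "industry": "Software",
# }
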
def fetch_company_info(company_url):
    """Start a Firecrawl crawl of the company site and return the API response."""
    api_key = os.getenv("FIRECRAWL_API_KEY")
    headers = {
        'Authorization': f'Bearer {api_key}',
        'Content-Type': 'application/json'
    }
    api_endpoint = 'https://api.firecrawl.dev/v1/crawl'

    data = {
        "url": company_url,
        "limit": 100,
        "scrapeOptions": {
            "formats": ["markdown", "html"]
        }
    }

    logging.info("Fetching company information...")
    try:
        response = requests.post(api_endpoint, json=data, headers=headers, timeout=15)
        if response.status_code == 200:
            logging.info("Company information fetched successfully.")
            return response.json()
        else:
            logging.error(f"Error fetching company information: {response.text}")
            return {"error": f"Error fetching company information: {response.text}"}
    except Exception as e:
        logging.error(f"Exception during company info fetch: {e}")
        return {"error": f"Exception during company info fetch: {e}"}

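# Note: a crawl request may return a job reference rather than the scraped pages
# themselves, and the fields read downstream (company_name, mission, goal) are not
# guaranteed to be present; structure_email() therefore falls back to generic
# defaults whenever a field is missing.
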
def structure_email(user_data, linkedin_info, company_info):
    """Assemble a templated application email from user, LinkedIn, and company data."""
    linkedin_role = sanitize_data(linkedin_info.get('current_role', user_data['role']))
    linkedin_skills = sanitize_data(linkedin_info.get('skills', 'various relevant skills'))
    linkedin_industry = sanitize_data(linkedin_info.get('industry', 'the industry'))
    # The user-supplied "Company URL or Name" field takes precedence; otherwise fall
    # back to whatever name the crawl returned.
    company_name = sanitize_data(user_data['company_url'] or company_info.get('company_name', 'the company'))
    company_mission = sanitize_data(company_info.get('mission', f"{company_name}'s mission"))
    company_goal = sanitize_data(company_info.get('goal', 'achieving excellence'))

    email_body = "Dear Hiring Manager,\n\n"
    email_body += f"I am writing to express my interest in the {sanitize_data(user_data['role'])} position at {company_name}. "
    email_body += f"I am particularly drawn to {company_name}'s mission to {company_mission}, which aligns with my passion and expertise in {linkedin_industry}. "
    email_body += f"As a {linkedin_role}, I have developed skills in {linkedin_skills}, which I believe are highly relevant to the requirements of this role.\n\n"
    email_body += f"I am confident that my background and expertise in {linkedin_skills} can contribute to achieving {company_goal}. "
    email_body += f"My experience in similar roles has prepared me to make an immediate and meaningful impact on your team, and I am excited about the opportunity to bring my expertise to {company_name}.\n\n"
    email_body += "I would appreciate the opportunity to discuss how my background and skills align with the needs of your organization. "
    email_body += "Thank you for your time and consideration. I look forward to the possibility of contributing to your team.\n\n"
    email_body += f"Best regards,\n{sanitize_data(user_data['name'])}"

    return email_body

def validate_email(email_content):
    logging.info("Validating email content...")
    logging.info(f"Email Content for Validation: {email_content}")

    return all(keyword in email_content for keyword in ["interest", "skills", "experience", "contribute", "Best regards"])

def generate_email_content(api_key, prompt):
    """Generate (or refine) email text with an NVIDIA-hosted model via the OpenAI-compatible API."""
    client = OpenAI(
        base_url="https://integrate.api.nvidia.com/v1",
        api_key=api_key
    )

    logging.info("Generating email content...")
    try:
        response = client.chat.completions.create(
            model="nvidia/llama-3.1-nemotron-70b-instruct",
            messages=[
                {"role": "user", "content": prompt}
            ],
            temperature=0.5,
            top_p=1,
            max_tokens=1024,
            stream=False
        )

        if hasattr(response, 'choices') and len(response.choices) > 0:
            email_content = response.choices[0].message.content
            logging.info("Email content generated successfully.")
            return email_content
        else:
            logging.error("Error: No choices found in the response.")
            return "Error generating email content: No valid choices."
    except Exception as e:
        logging.error(f"Error generating email content: {e}")
        return "Error generating email content."

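# Illustrative standalone call (assumes the key above is valid for NVIDIA's endpoint):
#   draft = generate_email_content(OPENAI_API_KEY, "Write a short cover email for a Data Analyst role.")
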
class Agent:
    """A named agent whose behaviour is selected by its name in act()."""

    def __init__(self, name, instructions, user_data):
        self.name = name
        self.instructions = instructions
        self.user_data = user_data

    def act(self):
        if self.name == "Data Collection Agent":
            linkedin_info = fetch_linkedin_data(self.user_data['linkedin_url'])
            company_info = fetch_company_info(self.user_data['company_url'])
            return linkedin_info, company_info
        elif self.name == "Email Generation Agent":
            user_data = self.user_data['user_data']
            linkedin_info = self.user_data['linkedin_info']
            company_info = self.user_data['company_info']
            email_content = structure_email(user_data, linkedin_info, company_info)
            return email_content

class Swarm:
    """A minimal container that runs its agents in sequence."""

    def __init__(self):
        self.agents = []

    def add_agent(self, agent):
        self.agents.append(agent)

    def run(self):
        # Returns either an error string or a (linkedin_info, company_info) tuple.
        for agent in self.agents:
            if agent.name == "Data Collection Agent":
                linkedin_info, company_info = agent.act()
                if "error" in linkedin_info or "error" in company_info:
                    return "Error fetching data. Please check the LinkedIn and company URLs."
                return linkedin_info, company_info

def run_agent(name, email, phone, linkedin_url, company_url, role):
    """Orchestrate data collection, email drafting, and validation; return the email or an error."""
    user_data = {
        "name": name,
        "email": email,
        "phone": phone,
        "linkedin_url": linkedin_url,
        "company_url": company_url,
        "role": role
    }

    email_swarm = Swarm()
    data_collection_agent = Agent("Data Collection Agent", "Collect user inputs and relevant data", user_data)
    email_swarm.add_agent(data_collection_agent)

    # Swarm.run() returns an error string on failure, otherwise a data tuple.
    result = email_swarm.run()
    if isinstance(result, str):
        return result
    linkedin_info, company_info = result

    agent_data = {
        "user_data": user_data,
        "linkedin_info": linkedin_info,
        "company_info": company_info
    }

    email_agent = Agent("Email Generation Agent", "Generate the email content", agent_data)
    email_content = email_agent.act()

    # Validate the draft; if it fails, ask the LLM to refine it (up to 3 attempts).
    for _ in range(3):
        if validate_email(email_content):
            return email_content
        refined_prompt = f"Refine: {structure_email(user_data, linkedin_info, company_info)}"
        email_content = generate_email_content(OPENAI_API_KEY, refined_prompt)

    return "Unable to generate a valid email after 3 attempts."

final_interface = gr.Interface(
    fn=run_agent,
    inputs=[
        gr.Textbox(label="Name"),
        gr.Textbox(label="Email"),
        gr.Textbox(label="Phone Number"),
        gr.Textbox(label="LinkedIn Profile URL"),
        gr.Textbox(label="Company URL or Name"),
        gr.Textbox(label="Role Being Applied For")
    ],
    outputs="text",
    title="Email Writing AI Agent",
    description="Autonomously generate a professional email tailored to the job application."
)

if __name__ == "__main__":
    final_interface.launch()
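# To expose a temporary public link (useful in notebooks or on remote machines),
# Gradio's launch() also accepts share=True:
#   final_interface.launch(share=True)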