# cold_email_2.0 / llm.py
# Author: Raykarr — "Second model version" (commit 6eb7feb)
# Default
import os
from groq import Groq
from dotenv import load_dotenv
import traceback

# Pull GROQ_API_KEY (and any other settings) from a local .env file into
# os.environ before the client is constructed.
load_dotenv()

# Module-level Groq client shared by all calls in this file.
# NOTE(review): os.environ.get returns None when GROQ_API_KEY is unset, so a
# missing key is only surfaced later as an auth error on the first API call —
# consider failing fast here. TODO confirm desired behavior.
client = Groq(
    # This is the default and can be omitted
    api_key=os.environ.get("GROQ_API_KEY"),
)
def chat_with_groq(user_input, additional_info=None):
    """Generate a cold email with the Groq chat API.

    Parameters
    ----------
    user_input : dict
        Must provide the keys 'recipient_role', 'industry', 'tone',
        'context', 'sender_name', 'receiver_name',
        'receiver_designation', and 'receiver_company_name'.
    additional_info : optional
        Currently unused; kept for backward compatibility with existing
        callers.

    Returns
    -------
    str or None
        The generated email text, or None if prompt construction or the
        API call fails (the traceback is printed to stderr).
    """
    try:
        # Build the prompt first. A missing user_input key raises KeyError
        # here, before any network traffic, and is reported by the handler
        # below rather than silently producing a malformed prompt.
        template_prompt = f"""
You are a professional email writer who specialises in writing cold email.
Generate a cold email to {user_input['recipient_role']} based on the following information:
Industry: {user_input['industry']}
Tone: {user_input['tone']}
Context: {user_input['context']}
Sender's Name: {user_input['sender_name']}
Receiver's Name: {user_input['receiver_name']}
Receiver's Designation: {user_input['receiver_designation']}
Receiver's Company Name: {user_input['receiver_company_name']}
Incorporate all information and write a compelling email.
"""
        # test purposes:
        # return template_prompt
        chat_completion = client.chat.completions.create(
            messages=[
                {
                    "role": "system",
                    "content": "You are an expert email writer specializing in cold emails.",
                },
                {
                    "role": "user",
                    "content": template_prompt,
                },
            ],
            model="mixtral-8x7b-32768",
        )
        return chat_completion.choices[0].message.content
    except Exception:
        # Best-effort error reporting, preserving the original contract of
        # returning None on failure. print_exc() already includes the
        # exception message, so the former redundant print(err) is dropped.
        traceback.print_exc()
        return None