# cold_email/llm.py
# Groq client setup
import os
import traceback

from groq import Groq

client = Groq(
    # api_key defaults to the GROQ_API_KEY environment variable, so this is optional
    api_key=os.environ.get("GROQ_API_KEY"),
)
def chat_with_groq(user_input, additional_info=None):
    try:
        template_prompt = f"""
        You are a professional email writer who specializes in writing cold emails.
        Generate a cold email to {user_input['recipient_name']} based on the following information:
        Industry: {user_input['industry']}
        Tone: {user_input['tone']}
        Context: {user_input['context']}
        Sender's Name: {user_input['name']}
        Designation: {user_input['designation']}
        Incorporate all of this information and write a compelling email.
        """
        # For test purposes, return the prompt instead of calling the API:
        # return template_prompt

        chat_completion = client.chat.completions.create(
            messages=[
                {
                    "role": "system",
                    "content": "You are an expert email writer specializing in cold emails.",
                },
                {
                    "role": "user",
                    "content": template_prompt,
                },
            ],
            model="mixtral-8x7b-32768",
        )
        generated_email = chat_completion.choices[0].message.content
        return generated_email
    except Exception as err:
        # Log the full traceback and signal failure to the caller
        traceback.print_exc()
        print(err)
        return None
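

# A minimal usage sketch (not part of the original module): the dictionary keys
# below match the fields the prompt template reads from user_input; the values
# are illustrative placeholders only.
if __name__ == "__main__":
    sample_input = {
        "recipient_name": "Jane Doe",
        "industry": "SaaS",
        "tone": "friendly but professional",
        "context": "Introducing an AI-powered email assistant",
        "name": "John Smith",
        "designation": "Founder",
    }
    email = chat_with_groq(sample_input)
    if email:
        print(email)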