from langchain_groq import ChatGroq
import os

# Groq API key pulled from the 'groq_api' environment variable at import time.
# NOTE(review): os.getenv returns None if the variable is unset — ChatGroq will
# then fail at request time; set 'groq_api' before importing this module.
api = os.getenv('groq_api')
def get_llm():
    """Build and return a ChatGroq LLM client.

    Uses the Llama 3.3 70B "versatile" model (the docstring previously said
    Llama 3.1, which contradicted the model string below). Deterministic
    output (temperature=0), capped at 1024 tokens per response.

    Returns:
        ChatGroq: a configured chat-model instance authenticated with the
        module-level `api` key (read from the 'groq_api' env var).
    """
    return ChatGroq(
        model="llama-3.3-70b-versatile",
        temperature=0,       # deterministic responses
        max_tokens=1024,     # cap on generated tokens per call
        api_key=api,         # module-level key; None if env var unset
    )
# Module-level singleton: importers use `llm` directly instead of
# constructing their own client.
llm = get_llm()