from langchain.chat_models import ChatOpenAI
from langchain.llms import OpenAI
def get_llm(model_name, model_temperature, api_key, max_tokens=None):
    """Build an OpenAI LLM wrapper appropriate for the requested model.

    Args:
        model_name: OpenAI model identifier (e.g. ``"gpt-3.5-turbo"`` or the
            legacy completion model ``"text-davinci-003"``).
        model_temperature: Sampling temperature passed through to the model.
        api_key: OpenAI API key forwarded as ``openai_api_key``.
        max_tokens: Optional completion-length cap; ``None`` leaves the
            library default in place.

    Returns:
        A langchain ``OpenAI`` instance for the legacy completion model,
        otherwise a ``ChatOpenAI`` instance.
    """
    # "text-davinci-003" is a completions-API model; it needs the plain
    # OpenAI wrapper rather than the chat wrapper.
    if model_name == "text-davinci-003":
        return OpenAI(
            temperature=model_temperature,
            model_name=model_name,
            max_tokens=max_tokens,
            openai_api_key=api_key,
        )
    # All other model names are assumed to be chat-style models.
    return ChatOpenAI(
        temperature=model_temperature,
        model_name=model_name,
        max_tokens=max_tokens,
        openai_api_key=api_key,
    )