# vella-backend/_utils/LLMs/LLM_class.py

import os
from typing import cast

from pydantic import SecretStr

from setup.environment import default_model
from setup.easy_imports import ChatOpenAI, ChatGoogleGenerativeAI

# API keys are read from the environment; cast() only narrows the
# Optional[str] type for the type checker, it does not validate the value.
deepseek_api_key = cast(str, os.environ.get("DEEPSEEKK_API_KEY"))
google_api_key = cast(str, os.environ.get("GOOGLE_API_KEY_PEIXE"))

class LLM:
    """Factory for the chat model clients used by the project."""

    def __init__(self):
        pass

    # def create_GPT_model(self, model=default_model):
    #     return ChatOpen()

    def deepseek(self, model="deepseek-chat"):
        """DeepSeek chat model through its OpenAI-compatible endpoint."""
        return ChatOpenAI(
            api_key=SecretStr(deepseek_api_key),
            base_url="https://api.deepseek.com/v1",
            model=model,
        )

    def google_gemini(self, model="gemini-2.0-flash"):
        """Gemini chat model configured for deterministic output (temperature=0)."""
        return ChatGoogleGenerativeAI(
            api_key=SecretStr(google_api_key),
            model=model,
            temperature=0,
            max_tokens=None,
            timeout=None,
            max_retries=2,
        )