# NOTE(review): the following lines are web-UI scrape residue (author, merge-commit
# message, short hash, "raw/history/blame" links, file size) that was pasted into
# the source and breaks the Python syntax. Preserved here as comments:
# luanpoppe
# Merge branch 'tests' of https://github.com/luanpoppe/vella-backend into feat-adicionar-google-gemini-2
# 753b4be
# raw / history / blame — 3.66 kB
from rest_framework import serializers
from _antigos.resumos.serializer import ResumoCursorSerializer
from _utils.gerar_relatorio_modelo_usuario.prompts import (
prompt_gerar_documento,
prompt_auxiliar_padrao,
)
# Default question sent to the LLM when the caller does not provide a
# `user_message` of their own; used as the field default in both serializers below.
user_message = "What are the main points of this document?"
class FileInfoSerializer(serializers.Serializer):
    """Describes one file reference supplied by the client.

    Used as the ``child`` of the ``files`` list on ``GerarDocumentoSerializer``.
    """

    # Identifier of the file in the upstream storage.
    unique_id = serializers.CharField(
        max_length=255,
    )
    # File type label (free-form string; presumably an extension or MIME-like
    # tag — confirm against the upload flow).
    tipo_arquivo = serializers.CharField(
        max_length=255,
    )
    # URL from which the file can be fetched.
    link_arquivo = serializers.URLField()
class GerarDocumentoSerializer(ResumoCursorSerializer):
    """Validates the payload for generating a document from remotely stored
    files (each ``files`` entry points to an already-uploaded file).

    Inherits the base fields from ``ResumoCursorSerializer``; ``system_prompt``
    is set to ``None`` to remove that inherited field from this serializer.
    """

    system_prompt = None  # drop the field inherited from ResumoCursorSerializer
    # List of file references; each item is validated by FileInfoSerializer.
    files = serializers.ListField(child=FileInfoSerializer(), required=True)
    bubble_editor_version = serializers.CharField(
        required=False, default="version-test"
    )  # Will be the value used inside the request URL to Bubble
    # NOTE(review): this field name shadows the imported
    # `prompt_gerar_documento`; the `default=` on the RHS is evaluated before
    # the class attribute is bound, so it still resolves to the module-level
    # import — but renaming either would be clearer.
    prompt_gerar_documento = serializers.CharField(
        required=False, default=prompt_gerar_documento
    )
    user_message = serializers.CharField(required=False, default=user_message)
    # --- Retrieval / reranking tuning knobs (semantics assumed from names;
    # confirm against the retrieval pipeline) ---
    num_chunks_retrieval = serializers.IntegerField(default=20)
    # Hybrid-search weights (dense embedding vs. BM25) — presumably expected
    # to sum to 1.0; not enforced here.
    embedding_weight = serializers.FloatField(default=0.5)
    bm25_weight = serializers.FloatField(default=0.5)
    context_window = serializers.IntegerField(default=3)
    chunk_overlap = serializers.IntegerField(default=800)
    num_k_rerank = serializers.IntegerField(default=20)
    model_cohere_rerank = serializers.CharField(
        required=False, default="rerank-english-v2.0"
    )
    more_initial_chunks_for_reranking = serializers.IntegerField(default=100)
    claude_context_model = serializers.CharField(
        required=False, default="claude-3-haiku-20240307"
    )
    gpt_temperature = serializers.FloatField(default=0)
    id_modelo_do_usuario = serializers.IntegerField(required=False)
    should_have_contextual_chunks = serializers.BooleanField(default=False)  # type: ignore
    should_use_llama_parse = serializers.BooleanField(required=False, default=False)  # type: ignore
    # Model used for the final generation requests.
    llm_ultimas_requests = serializers.CharField(
        required=False, default="gemini-2.0-flash"
    )
class GerarDocumentoComPDFProprioSerializer(ResumoCursorSerializer):
    """Validates the payload for generating a document from the user's own
    PDF (no ``files`` list — the upload presumably arrives separately; confirm
    against the view).

    Mirrors ``GerarDocumentoSerializer`` except for different defaults
    (``id_modelo_do_usuario=11``, ``llm_ultimas_requests="gpt-4o-mini"``);
    ``system_prompt`` is set to ``None`` to remove the inherited field.
    """

    system_prompt = None  # drop the field inherited from ResumoCursorSerializer
    # NOTE(review): this field name shadows the imported
    # `prompt_gerar_documento`; the `default=` on the RHS is evaluated before
    # the class attribute is bound, so it still resolves to the module-level
    # import — but renaming either would be clearer.
    prompt_gerar_documento = serializers.CharField(
        required=False, default=prompt_gerar_documento
    )
    user_message = serializers.CharField(required=False, default=user_message)
    # --- Retrieval / reranking tuning knobs (semantics assumed from names;
    # confirm against the retrieval pipeline) ---
    num_chunks_retrieval = serializers.IntegerField(default=20)
    # Hybrid-search weights (dense embedding vs. BM25) — presumably expected
    # to sum to 1.0; not enforced here.
    embedding_weight = serializers.FloatField(default=0.5)
    bm25_weight = serializers.FloatField(default=0.5)
    context_window = serializers.IntegerField(default=3)
    chunk_overlap = serializers.IntegerField(default=800)
    num_k_rerank = serializers.IntegerField(default=20)
    model_cohere_rerank = serializers.CharField(
        required=False, default="rerank-english-v2.0"
    )
    more_initial_chunks_for_reranking = serializers.IntegerField(default=100)
    claude_context_model = serializers.CharField(
        required=False, default="claude-3-haiku-20240307"
    )
    gpt_temperature = serializers.FloatField(default=0)
    # Unlike GerarDocumentoSerializer, this variant has a concrete default
    # user-model id.
    id_modelo_do_usuario = serializers.IntegerField(required=False, default=11)
    should_have_contextual_chunks = serializers.BooleanField(default=False)  # type: ignore
    should_use_llama_parse = serializers.BooleanField(required=False, default=False)  # type: ignore
    # Model used for the final generation requests.
    llm_ultimas_requests = serializers.CharField(required=False, default="gpt-4o-mini")