# modules/chatbot/chat_process.py
import os
import anthropic
import logging
from typing import Dict, Generator

logger = logging.getLogger(__name__)


class ChatProcessor:
    def __init__(self):
        # Anthropic client; the API key is read from the environment
        self.client = anthropic.Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])
        self.conversation_history = []

    def process_chat_input(self, message: str, lang_code: str) -> Generator[str, None, None]:
        """
        Process the chat input and yield the response in chunks.
        """
        try:
            # Add the user message to the conversation history
            self.conversation_history.append(f"Human: {message}")

            # Generate a streamed response (claude-3 models use the Messages API,
            # not the legacy Completions endpoint)
            response = self.client.messages.create(
                model="claude-3-opus-20240229",
                max_tokens=300,
                temperature=0.7,
                messages=[{"role": "user", "content": message}],
                stream=True  # enable streaming
            )

            # Yield the response chunk by chunk
            full_response = ""
            for event in response:
                if event.type == "content_block_delta":
                    yield event.delta.text
                    full_response += event.delta.text

            # Store the complete response in the conversation history
            self.conversation_history.append(f"Assistant: {full_response}")
        except Exception as e:
            logger.error(f"Error in process_chat_input: {str(e)}")
            yield f"Error: {str(e)}"