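"""Streamlit user interface for AIdeaText.

Handles login/registration, the admin panel, the student analysis tabs
(morphosyntactic, semantic and discourse analysis, chatbot, progress) and
persistence of results through the modules.database helpers.
"""
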
import streamlit as st
import re
import base64
import logging
from io import BytesIO
from datetime import datetime

import matplotlib.pyplot as plt
import pandas as pd
from PIL import Image
from streamlit_player import st_player
from spacy import displacy

from modules.email import send_email_notification

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

from .auth import authenticate_user, register_user

from .database import (
    get_student_data,
    store_application_request,
    store_morphosyntax_result,
    store_semantic_result,
    store_discourse_analysis_result,
    store_chat_history,
    create_admin_user,
    create_student_user
)

# Note: the local admin_page() defined below shadows this import.
from .admin_ui import admin_page

from .morpho_analysis import (
    generate_arc_diagram,
    get_repeated_words_colors,
    highlight_repeated_words,
    POS_COLORS,
    POS_TRANSLATIONS
)
from .semantic_analysis import visualize_semantic_relations, perform_semantic_analysis
from .discourse_analysis import compare_semantic_analysis, perform_discourse_analysis
from .chatbot import initialize_chatbot, get_chatbot_response
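

# The analysis interfaces below expect an `nlp_models` dict keyed by language
# code ('es', 'en', 'fr'), but the original module never defines or imports it.
# This cached loader is a minimal sketch to make user_page() runnable; the
# exact spaCy pipelines are an assumption and should be replaced with the
# models the deployment actually installs.
import spacy


@st.cache_resource
def load_nlp_models():
    return {
        'es': spacy.load('es_core_news_sm'),
        'en': spacy.load('en_core_web_sm'),
        'fr': spacy.load('fr_core_news_sm'),
    }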


def initialize_session_state():
    if 'initialized' not in st.session_state:
        st.session_state.clear()
        st.session_state.initialized = True
        st.session_state.logged_in = False
        st.session_state.page = 'login'
        st.session_state.username = None
        st.session_state.role = None


def main():
    initialize_session_state()

    logger.info(f"Current page: {st.session_state.page}")
    logger.info(f"User role: {st.session_state.role}")

    if st.session_state.page == 'login':
        login_register_page()
    elif st.session_state.page == 'admin':
        logger.info("Rendering admin page")
        admin_page()
    elif st.session_state.page == 'user':
        user_page()
    else:
        logger.warning(f"Unrecognized page: {st.session_state.page}")

    logger.info(f"Final session state: {st.session_state}")


def login_register_page():
    st.title("AIdeaText")

    left_column, right_column = st.columns([1, 3])

    with left_column:
        tab1, tab2 = st.tabs(["Iniciar Sesión", "Registrarse"])

        with tab1:
            login_form()

        with tab2:
            register_form()

    with right_column:
        display_videos_and_info()


def login_form():
    username = st.text_input("Correo electrónico", key="login_username")
    password = st.text_input("Contraseña", type="password", key="login_password")

    if st.button("Iniciar Sesión", key="login_button"):
        success, role = authenticate_user(username, password)
        if success:
            st.session_state.logged_in = True
            st.session_state.username = username
            st.session_state.role = role
            st.session_state.page = 'admin' if role == 'Administrador' else 'user'
            logger.info(f"Login successful. User: {username}, Role: {role}")
            logger.info(f"Session state after login: {st.session_state}")
            st.rerun()
        else:
            st.error("Credenciales incorrectas")


def admin_page():
    # This local definition takes precedence over the admin_page imported from .admin_ui.
    st.title("Panel de Administración")
    st.write(f"Bienvenido, {st.session_state.username}")

    st.header("Crear Nuevo Usuario Estudiante")
    new_username = st.text_input("Correo electrónico del nuevo usuario", key="admin_new_username")
    new_password = st.text_input("Contraseña", type="password", key="admin_new_password")
    if st.button("Crear Usuario", key="admin_create_user"):
        if create_student_user(new_username, new_password):
            st.success(f"Usuario estudiante {new_username} creado exitosamente")
        else:
            st.error("Error al crear el usuario estudiante")


def user_page():
    st.title("Bienvenido a AIdeaText")
    st.write(f"Hola, {st.session_state.username}")

    # The analysis tabs need the spaCy pipelines; `nlp_models` was referenced
    # here without being defined, so load the cached models declared above.
    nlp_models = load_nlp_models()

    tabs = st.tabs(["Análisis Morfosintáctico", "Análisis Semántico", "Análisis del Discurso", "Chat", "Mi Progreso"])

    with tabs[0]:
        display_morphosyntax_analysis_interface(nlp_models, 'es')
    with tabs[1]:
        display_semantic_analysis_interface(nlp_models, 'es')
    with tabs[2]:
        display_discourse_analysis_interface(nlp_models, 'es')
    with tabs[3]:
        display_chatbot_interface('es')
    with tabs[4]:
        display_student_progress(st.session_state.username, 'es')


def display_videos_and_info():
    st.header("Videos: pitch, demos, entrevistas, otros")

    videos = {
        "Intro AideaText": "https://www.youtube.com/watch?v=UA-md1VxaRc",
        "Pitch IFE Explora": "https://www.youtube.com/watch?v=Fqi4Di_Rj_s",
        "Entrevista Dr. Guillermo Ruíz": "https://www.youtube.com/watch?v=_ch8cRja3oc",
        "Demo versión desktop": "https://www.youtube.com/watch?v=nP6eXbog-ZY"
    }

    selected_title = st.selectbox("Selecciona un video tutorial:", list(videos.keys()))

    if selected_title in videos:
        try:
            st_player(videos[selected_title])
        except Exception as e:
            st.error(f"Error al cargar el video: {str(e)}")

    st.markdown("""
## Novedades de la versión actual
- Nueva función de análisis semántico
- Soporte para múltiples idiomas
- Interfaz mejorada para una mejor experiencia de usuario
""")


def register_form():
    st.header("Solicitar prueba de la aplicación")

    name = st.text_input("Nombre completo")
    email = st.text_input("Correo electrónico institucional")
    institution = st.text_input("Institución")
    role = st.selectbox("Rol en tu institución", ["Estudiante", "Profesor", "Investigador", "Otro"])
    reason = st.text_area("¿Por qué estás interesado en probar AIdeaText?")

    if st.button("Enviar solicitud"):
        if not name or not email or not institution or not reason:
            st.error("Por favor, completa todos los campos.")
        elif not is_institutional_email(email):
            st.error("Por favor, utiliza un correo electrónico institucional.")
        else:
            success = store_application_request(name, email, institution, role, reason)
            if success:
                send_email_notification(name, email, institution, role, reason)
                st.success("Tu solicitud ha sido enviada. Te contactaremos pronto.")
                logger.info(f"Application request stored successfully for {email}")
            else:
                st.error("Hubo un problema al enviar tu solicitud. Por favor, intenta de nuevo más tarde.")
                logger.error(f"Failed to store application request for {email}")


def is_institutional_email(email):
    # Compare the actual domain instead of a substring match, so addresses such
    # as "someone@notgmail.company.edu" are not rejected by accident.
    forbidden_domains = {'gmail.com', 'hotmail.com', 'yahoo.com', 'outlook.com'}
    domain = email.lower().rsplit('@', 1)[-1].strip()
    return domain not in forbidden_domains


def display_student_progress(username, lang_code='es'):
    student_data = get_student_data(username)

    if student_data is None or len(student_data['entries']) == 0:
        st.warning("No se encontraron datos para este estudiante.")
        st.info("Intenta realizar algunos análisis de texto primero.")
        return

    st.title(f"Progreso de {username}")

    with st.expander("Resumen de Actividades y Progreso", expanded=True):
        total_entries = len(student_data['entries'])
        st.write(f"Total de análisis realizados: {total_entries}")

        analysis_types = [entry['analysis_type'] for entry in student_data['entries']]
        analysis_counts = pd.Series(analysis_types).value_counts()

        fig, ax = plt.subplots()
        analysis_counts.plot(kind='bar', ax=ax)
        ax.set_title("Tipos de análisis realizados")
        ax.set_xlabel("Tipo de análisis")
        ax.set_ylabel("Cantidad")
        st.pyplot(fig)

        # Aggregate by calendar day so the time series shows counts per date
        # rather than one point per exact timestamp.
        dates = [datetime.fromisoformat(entry['timestamp']).date() for entry in student_data['entries']]
        date_counts = pd.Series(dates).value_counts().sort_index()

        fig, ax = plt.subplots()
        date_counts.plot(kind='line', ax=ax)
        ax.set_title("Análisis realizados a lo largo del tiempo")
        ax.set_xlabel("Fecha")
        ax.set_ylabel("Cantidad de análisis")
        st.pyplot(fig)

    with st.expander("Histórico de Análisis Morfosintácticos"):
        morphosyntax_entries = [entry for entry in student_data['entries'] if entry['analysis_type'] == 'morphosyntax']
        for entry in morphosyntax_entries:
            st.subheader(f"Análisis del {entry['timestamp']}")
            if entry['arc_diagrams']:
                st.write(entry['arc_diagrams'][0], unsafe_allow_html=True)

    with st.expander("Histórico de Análisis Semánticos"):
        semantic_entries = [entry for entry in student_data['entries'] if entry['analysis_type'] == 'semantic']
        for entry in semantic_entries:
            st.subheader(f"Análisis del {entry['timestamp']}")
            st.write(f"Archivo analizado: {entry.get('filename', 'Nombre no disponible')}")
            if 'network_diagram' in entry:
                try:
                    image_bytes = base64.b64decode(entry['network_diagram'])
                    st.image(image_bytes)
                except Exception as e:
                    st.error(f"No se pudo mostrar la imagen: {str(e)}")
                    st.write("Datos de la imagen (para depuración):")
                    st.write(entry['network_diagram'][:100] + "...")

    with st.expander("Histórico de Análisis Discursivos"):
        discourse_entries = [entry for entry in student_data['entries'] if entry['analysis_type'] == 'discourse']
        for entry in discourse_entries:
            st.subheader(f"Análisis del {entry['timestamp']}")
            st.write(f"Archivo patrón: {entry.get('filename1', 'Nombre no disponible')}")
            st.write(f"Archivo comparado: {entry.get('filename2', 'Nombre no disponible')}")

            try:
                if 'graph1' in entry and 'graph2' in entry:
                    img1 = Image.open(BytesIO(base64.b64decode(entry['graph1'])))
                    img2 = Image.open(BytesIO(base64.b64decode(entry['graph2'])))

                    # Place both graphs side by side on a single canvas.
                    total_width = img1.width + img2.width
                    max_height = max(img1.height, img2.height)
                    combined_img = Image.new('RGB', (total_width, max_height))
                    combined_img.paste(img1, (0, 0))
                    combined_img.paste(img2, (img1.width, 0))

                    buffered = BytesIO()
                    combined_img.save(buffered, format="PNG")
                    img_str = base64.b64encode(buffered.getvalue()).decode()

                    st.image(f"data:image/png;base64,{img_str}")
                elif 'combined_graph' in entry:
                    img_bytes = base64.b64decode(entry['combined_graph'])
                    st.image(img_bytes)
                else:
                    st.write("No se encontraron gráficos para este análisis.")
            except Exception as e:
                st.error(f"No se pudieron mostrar los gráficos: {str(e)}")
                st.write("Datos de los gráficos (para depuración):")
                if 'graph1' in entry:
                    st.write("Graph 1:", entry['graph1'][:100] + "...")
                if 'graph2' in entry:
                    st.write("Graph 2:", entry['graph2'][:100] + "...")
                if 'combined_graph' in entry:
                    st.write("Combined Graph:", entry['combined_graph'][:100] + "...")

    with st.expander("Histórico de Conversaciones con el ChatBot"):
        if 'chat_history' in student_data:
            for i, chat in enumerate(student_data['chat_history']):
                st.subheader(f"Conversación {i+1} - {chat['timestamp']}")
                for message in chat['messages']:
                    if message['role'] == 'user':
                        st.write("Usuario: " + message['content'])
                    else:
                        st.write("Asistente: " + message['content'])
                st.write("---")
        else:
            st.write("No se encontraron conversaciones con el ChatBot.")

    if st.checkbox("Mostrar datos de depuración"):
        st.write("Datos del estudiante (para depuración):")
        st.json(student_data)


def display_morphosyntax_analysis_interface(nlp_models, lang_code):
    translations = {
        'es': {
            'title': "AIdeaText - Análisis morfológico y sintáctico",
            'input_label': "Ingrese un texto para analizar (máx. 5,000 palabras):",
            'input_placeholder': "El objetivo de esta aplicación es que mejore sus habilidades de redacción...",
            'analyze_button': "Analizar texto",
            'repeated_words': "Palabras repetidas",
            'legend': "Leyenda: Categorías gramaticales",
            'arc_diagram': "Análisis sintáctico: Diagrama de arco",
            'sentence': "Oración",
            'success_message': "Análisis guardado correctamente.",
            'error_message': "Hubo un problema al guardar el análisis. Por favor, inténtelo de nuevo.",
            'warning_message': "Por favor, ingrese un texto para analizar."
        },
        'en': {
            'title': "AIdeaText - Morphological and Syntactic Analysis",
            'input_label': "Enter a text to analyze (max 5,000 words):",
            'input_placeholder': "The goal of this app is for you to improve your writing skills...",
            'analyze_button': "Analyze text",
            'repeated_words': "Repeated words",
            'legend': "Legend: Grammatical categories",
            'arc_diagram': "Syntactic analysis: Arc diagram",
            'sentence': "Sentence",
            'success_message': "Analysis saved successfully.",
            'error_message': "There was a problem saving the analysis. Please try again.",
            'warning_message': "Please enter a text to analyze."
        },
        'fr': {
            'title': "AIdeaText - Analyse morphologique et syntaxique",
            'input_label': "Entrez un texte à analyser (max 5 000 mots) :",
            'input_placeholder': "Le but de cette application est d'améliorer vos compétences en rédaction...",
            'analyze_button': "Analyser le texte",
            'repeated_words': "Mots répétés",
            'legend': "Légende : Catégories grammaticales",
            'arc_diagram': "Analyse syntaxique : Diagramme en arc",
            'sentence': "Phrase",
            'success_message': "Analyse enregistrée avec succès.",
            'error_message': "Un problème est survenu lors de l'enregistrement de l'analyse. Veuillez réessayer.",
            'warning_message': "Veuillez entrer un texte à analyser."
        }
    }

    t = translations[lang_code]

    input_key = f"morphosyntax_input_{lang_code}"

    if input_key not in st.session_state:
        st.session_state[input_key] = ""

    sentence_input = st.text_area(
        t['input_label'],
        height=150,
        placeholder=t['input_placeholder'],
        value=st.session_state[input_key],
        key=f"text_area_{lang_code}",
        on_change=lambda: setattr(st.session_state, input_key, st.session_state[f"text_area_{lang_code}"])
    )

    if st.button(t['analyze_button'], key=f"analyze_button_{lang_code}"):
        current_input = st.session_state[input_key]
        if current_input:
            doc = nlp_models[lang_code](current_input)

            word_colors = get_repeated_words_colors(doc)

            with st.expander(t['repeated_words'], expanded=True):
                highlighted_text = highlight_repeated_words(doc, word_colors)
                st.markdown(highlighted_text, unsafe_allow_html=True)

                st.markdown(f"##### {t['legend']}")
                legend_html = "<div style='display: flex; flex-wrap: wrap;'>"
                for pos, color in POS_COLORS.items():
                    if pos in POS_TRANSLATIONS[lang_code]:
                        legend_html += f"<div style='margin-right: 10px;'><span style='background-color: {color}; padding: 2px 5px;'>{POS_TRANSLATIONS[lang_code][pos]}</span></div>"
                legend_html += "</div>"
                st.markdown(legend_html, unsafe_allow_html=True)

            with st.expander(t['arc_diagram'], expanded=True):
                sentences = list(doc.sents)
                arc_diagrams = []
                for i, sent in enumerate(sentences):
                    st.subheader(f"{t['sentence']} {i+1}")
                    html = displacy.render(sent, style="dep", options={"distance": 100})
                    html = html.replace('height="375"', 'height="200"')
                    html = re.sub(r'<svg[^>]*>', lambda m: m.group(0).replace('height="450"', 'height="300"'), html)
                    html = re.sub(r'<g [^>]*transform="translate\((\d+),(\d+)\)"', lambda m: f'<g transform="translate({m.group(1)},50)"', html)
                    st.write(html, unsafe_allow_html=True)
                    arc_diagrams.append(html)

            if store_morphosyntax_result(
                st.session_state.username,
                current_input,
                word_colors,
                arc_diagrams,
            ):
                st.success(t['success_message'])
            else:
                st.error(t['error_message'])
        else:
            st.warning(t['warning_message'])


def display_semantic_analysis_interface(nlp_models, lang_code):
    translations = {
        'es': {
            'title': "AIdeaText - Análisis semántico",
            'file_uploader': "Cargar archivo de texto",
            'analyze_button': "Analizar texto",
            'semantic_relations': "Relaciones Semánticas Relevantes",
            'success_message': "Análisis semántico guardado correctamente.",
            'error_message': "Hubo un problema al guardar el análisis semántico. Por favor, inténtelo de nuevo.",
            'warning_message': "Por favor, cargue un archivo para analizar."
        },
        'en': {
            'title': "AIdeaText - Semantic Analysis",
            'file_uploader': "Upload text file",
            'analyze_button': "Analyze text",
            'semantic_relations': "Relevant Semantic Relations",
            'success_message': "Semantic analysis saved successfully.",
            'error_message': "There was a problem saving the semantic analysis. Please try again.",
            'warning_message': "Please upload a file to analyze."
        },
        'fr': {
            'title': "AIdeaText - Analyse sémantique",
            'file_uploader': "Télécharger le fichier texte",
            'analyze_button': "Analyser le texte",
            'semantic_relations': "Relations Sémantiques Pertinentes",
            'success_message': "Analyse sémantique enregistrée avec succès.",
            'error_message': "Un problème est survenu lors de l'enregistrement de l'analyse sémantique. Veuillez réessayer.",
            'warning_message': "Veuillez télécharger un fichier à analyser."
        }
    }

    t = translations[lang_code]
    st.header(t['title'])

    uploaded_file = st.file_uploader(t['file_uploader'], type=['txt'])

    if st.button(t['analyze_button']):
        if uploaded_file is not None:
            text_content = uploaded_file.getvalue().decode('utf-8')

            relations_graph = perform_semantic_analysis(text_content, nlp_models[lang_code], lang_code)

            with st.expander(t['semantic_relations'], expanded=True):
                st.pyplot(relations_graph)

            if store_semantic_result(st.session_state.username, text_content, relations_graph):
                st.success(t['success_message'])
            else:
                st.error(t['error_message'])
        else:
            st.warning(t['warning_message'])


def display_discourse_analysis_interface(nlp_models, lang_code):
    translations = {
        'es': {
            'title': "AIdeaText - Análisis del discurso",
            'file_uploader1': "Cargar archivo de texto 1 (Patrón)",
            'file_uploader2': "Cargar archivo de texto 2 (Comparación)",
            'analyze_button': "Analizar textos",
            'comparison': "Comparación de Relaciones Semánticas",
            'success_message': "Análisis del discurso guardado correctamente.",
            'error_message': "Hubo un problema al guardar el análisis del discurso. Por favor, inténtelo de nuevo.",
            'warning_message': "Por favor, cargue ambos archivos para analizar."
        },
        'en': {
            'title': "AIdeaText - Discourse Analysis",
            'file_uploader1': "Upload text file 1 (Pattern)",
            'file_uploader2': "Upload text file 2 (Comparison)",
            'analyze_button': "Analyze texts",
            'comparison': "Comparison of Semantic Relations",
            'success_message': "Discourse analysis saved successfully.",
            'error_message': "There was a problem saving the discourse analysis. Please try again.",
            'warning_message': "Please upload both files to analyze."
        },
        'fr': {
            'title': "AIdeaText - Analyse du discours",
            'file_uploader1': "Télécharger le fichier texte 1 (Modèle)",
            'file_uploader2': "Télécharger le fichier texte 2 (Comparaison)",
            'analyze_button': "Analyser les textes",
            'comparison': "Comparaison des Relations Sémantiques",
            'success_message': "Analyse du discours enregistrée avec succès.",
            'error_message': "Un problème est survenu lors de l'enregistrement de l'analyse du discours. Veuillez réessayer.",
            'warning_message': "Veuillez télécharger les deux fichiers à analyser."
        }
    }

    t = translations[lang_code]
    st.header(t['title'])

    col1, col2 = st.columns(2)

    with col1:
        uploaded_file1 = st.file_uploader(t['file_uploader1'], type=['txt'])

    with col2:
        uploaded_file2 = st.file_uploader(t['file_uploader2'], type=['txt'])

    if st.button(t['analyze_button']):
        if uploaded_file1 is not None and uploaded_file2 is not None:
            text_content1 = uploaded_file1.getvalue().decode('utf-8')
            text_content2 = uploaded_file2.getvalue().decode('utf-8')

            graph1, graph2 = perform_discourse_analysis(text_content1, text_content2, nlp_models[lang_code], lang_code)

            st.subheader(t['comparison'])
            col1, col2 = st.columns(2)
            with col1:
                st.pyplot(graph1)
            with col2:
                st.pyplot(graph2)

            if store_discourse_analysis_result(st.session_state.username, text_content1, text_content2, graph1, graph2):
                st.success(t['success_message'])
            else:
                st.error(t['error_message'])
        else:
            st.warning(t['warning_message'])


def display_chatbot_interface(lang_code):
    translations = {
        'es': {
            'title': "Expertos en Vacaciones",
            'input_placeholder': "Escribe tu mensaje aquí...",
            'initial_message': "¡Hola! ¿Cómo podemos ayudarte?"
        },
        'en': {
            'title': "Vacation Experts",
            'input_placeholder': "Type your message here...",
            'initial_message': "Hi! How can we help you?"
        },
        'fr': {
            'title': "Experts en Vacances",
            'input_placeholder': "Écrivez votre message ici...",
            'initial_message': "Bonjour! Comment pouvons-nous vous aider?"
        }
    }
    t = translations[lang_code]
    st.title(t['title'])

    if 'chatbot' not in st.session_state:
        st.session_state.chatbot = initialize_chatbot()
    if 'messages' not in st.session_state:
        st.session_state.messages = [{"role": "assistant", "content": t['initial_message']}]

    chat_container = st.container()

    # Render the conversation so far.
    with chat_container:
        for message in st.session_state.messages:
            with st.chat_message(message["role"]):
                st.markdown(message["content"])

    user_input = st.chat_input(t['input_placeholder'])

    if user_input:
        st.session_state.messages.append({"role": "user", "content": user_input})

        with chat_container:
            with st.chat_message("user"):
                st.markdown(user_input)

        # Stream the assistant response chunk by chunk.
        with chat_container:
            with st.chat_message("assistant"):
                message_placeholder = st.empty()
                full_response = ""
                for chunk in get_chatbot_response(st.session_state.chatbot, user_input, lang_code):
                    full_response += chunk
                    message_placeholder.markdown(full_response + "▌")
                message_placeholder.markdown(full_response)

        st.session_state.messages.append({"role": "assistant", "content": full_response})

        try:
            store_chat_history(st.session_state.username, st.session_state.messages)
            st.success("Conversación guardada exitosamente")
        except Exception as e:
            st.error(f"Error al guardar la conversación: {str(e)}")
            logger.error(f"Error saving chat history for {st.session_state.username}: {str(e)}")

    # Note: Streamlit does not execute <script> tags injected via st.markdown,
    # so this auto-scroll snippet has no effect and is kept only for reference.
    st.markdown('<script>window.scrollTo(0,document.body.scrollHeight);</script>', unsafe_allow_html=True)


if __name__ == "__main__":
    main()