import streamlit as st
import logging
from .semantic_process import process_semantic_analysis
from ..chatbot.chatbot import initialize_chatbot, process_semantic_chat_input
from ..database.database_oldFromV2 import store_file_semantic_contents, retrieve_file_contents, delete_file, get_user_files
from ..utils.widget_utils import generate_unique_key
from .flexible_analysis_handler import FlexibleAnalysisHandler  # Add this line

logger = logging.getLogger(__name__)


def get_translation(t, key, default):
    """Return the translation for `key`, falling back to `default` if it is missing."""
    return t.get(key, default)


def display_semantic_interface(lang_code, nlp_models, t):
    """Render the semantic interface: file management, document analysis, and chat."""
    # Initialize the chatbot at the start of the function
    if 'semantic_chatbot' not in st.session_state:
        st.session_state.semantic_chatbot = initialize_chatbot('semantic')

    st.markdown("""
    """, unsafe_allow_html=True)

    # Display the initial message as a styled paragraph
    st.markdown(f"""
    """, unsafe_allow_html=True)

    tab1, tab2 = st.tabs(["Upload", "Analyze"])

    with tab1:
        st.subheader("File Management")

        uploaded_file = st.file_uploader("Choose a file to upload", type=['txt', 'pdf', 'docx', 'doc', 'odt'], key=generate_unique_key('semantic', 'file_uploader'))
        if uploaded_file is not None:
            # Note: the upload is decoded as plain text; binary formats (pdf, docx, odt) are not parsed here.
            file_contents = uploaded_file.getvalue().decode('utf-8')
            if store_file_semantic_contents(st.session_state.username, uploaded_file.name, file_contents):
                st.success(f"File {uploaded_file.name} uploaded and saved successfully")
            else:
                st.error("Error uploading file")

        st.markdown("---")  # Separator line

        st.subheader("Manage Uploaded Files")
        user_files = get_user_files(st.session_state.username, 'semantic')
        if user_files:
            for file in user_files:
                col1, col2 = st.columns([3, 1])
                with col1:
                    st.write(file['file_name'])
                with col2:
                    if st.button("Delete", key=f"delete_{file['file_name']}", help=f"Delete {file['file_name']}"):
                        if delete_file(st.session_state.username, file['file_name'], 'semantic'):
                            st.success(f"File {file['file_name']} deleted successfully")
                            st.rerun()
                        else:
                            st.error(f"Error deleting file {file['file_name']}")
        else:
            st.info("No files uploaded yet.")

    with tab2:
        st.subheader("Select File for Analysis")
        user_files = get_user_files(st.session_state.username, 'semantic')
        file_options = [get_translation(t, 'select_saved_file', 'Select a saved file')] + [file['file_name'] for file in user_files]
        selected_file = st.selectbox("", options=file_options, key=generate_unique_key('semantic', 'file_selector'))

        if st.button("Analyze Document", key=generate_unique_key('semantic', 'analyze_document')):
            if selected_file and selected_file != get_translation(t, 'select_saved_file', 'Select a saved file'):
                file_contents = retrieve_file_contents(st.session_state.username, selected_file, 'semantic')
                if file_contents:
                    st.session_state.file_contents = file_contents
                    with st.spinner("Analyzing..."):
                        try:
                            nlp_model = nlp_models[lang_code]
                            analysis_result = process_semantic_analysis(file_contents, nlp_model, lang_code)
                            handler = FlexibleAnalysisHandler(analysis_result)
                            st.session_state.concept_graph = handler.get_concept_graph()
                            st.session_state.entity_graph = handler.get_entity_graph()
                            st.session_state.key_concepts = handler.get_key_concepts()
                            st.success("Analysis completed successfully")
                        except Exception as e:
                            logger.error(f"Error during analysis: {str(e)}")
                            st.error(f"Error during analysis: {str(e)}")
                else:
                    st.error("Error loading file contents")
            else:
                st.error("Please select a file to analyze")

    # Chat and Visualization
    col_chat, col_graph = st.columns([1, 1])

    with col_chat:
        st.subheader("Chat with AI")
        st.markdown('