"""Streamlit interface for the semantic analysis module."""

import io
import base64
import re
import logging
from io import BytesIO

import streamlit as st
import spacy_streamlit  # required by visualize_ner / visualize_parser below
import matplotlib.pyplot as plt
import pandas as pd
from streamlit_float import *
from streamlit_antd_components import *
from streamlit.components.v1 import html

from .semantic_process import (
    process_semantic_input,
    format_semantic_results,
)
from ..utils.widget_utils import generate_unique_key
from ..database.semantic_mongo_db import store_student_semantic_result
from ..database.semantic_export import export_user_interactions

logger = logging.getLogger(__name__)

|
def display_semantic_interface(lang_code, nlp_models, semantic_t):
    """
    Semantic analysis interface with horizontally aligned controls.
    """
    try:
        # Initialize session state. The counter feeds the widget keys so that
        # "New Analysis" can force Streamlit to recreate the file uploader.
        if 'semantic_analysis_counter' not in st.session_state:
            st.session_state.semantic_analysis_counter = 0
        if 'semantic_file_content' not in st.session_state:
            st.session_state.semantic_file_content = None
        if 'semantic_analysis_done' not in st.session_state:
            st.session_state.semantic_analysis_done = False

        # Control row: file uploader plus the three action buttons.
        with st.container():
            col1, col2, col3, col4 = st.columns([3, 1, 1, 1])

            with col1:
                uploaded_file = st.file_uploader(
                    semantic_t.get('file_uploader', 'Upload TXT file'),
                    type=['txt'],
                    key=f"semantic_file_uploader_{st.session_state.semantic_analysis_counter}"
                )
                # Read the upload inline. The previous on_change callback
                # referenced uploaded_file before it was assigned and called
                # an undefined handle_file_upload helper.
                if uploaded_file is not None:
                    st.session_state.semantic_file_content = (
                        uploaded_file.getvalue().decode('utf-8')
                    )

            with col2:
                analyze_button = st.button(
                    semantic_t.get('analyze_button', 'Analyze Text'),
                    disabled=not st.session_state.semantic_file_content,
                    use_container_width=True,
                    key="analyze_semantic"
                )

            with col3:
                export_button = st.button(
                    semantic_t.get('export_button', 'Export Analysis'),
                    disabled=not st.session_state.semantic_analysis_done,
                    use_container_width=True,
                    key="export_semantic"
                )

            with col4:
                new_analysis_button = st.button(
                    semantic_t.get('new_analysis_button', 'New Analysis'),
                    disabled=not st.session_state.semantic_analysis_done,
                    use_container_width=True,
                    key="new_semantic"
                )

        st.markdown("<hr style='margin: 1em 0; padding: 0; opacity: 0.3'>", unsafe_allow_html=True)

        if analyze_button and st.session_state.semantic_file_content:
            try:
                with st.spinner(semantic_t.get('processing', 'Processing...')):
                    doc = nlp_models[lang_code](st.session_state.semantic_file_content)

                    st.markdown("### Semantic Analysis Results")

                    # Built-in spaCy visualizers: named entities, then the
                    # dependency parse.
                    spacy_streamlit.visualize_ner(
                        doc,
                        labels=nlp_models[lang_code].get_pipe("ner").labels
                    )
                    spacy_streamlit.visualize_parser(doc)

                    st.session_state.semantic_analysis_done = True
                    st.session_state.semantic_result = {'doc': doc}

            except Exception as e:
                logger.error(f"Semantic analysis error: {str(e)}")
                st.error(semantic_t.get('error_processing', f'Error processing text: {str(e)}'))

        if export_button and st.session_state.semantic_analysis_done:
            try:
                pdf_buffer = export_user_interactions(st.session_state.username, 'semantic')
                st.download_button(
                    label=semantic_t.get('download_pdf', 'Download PDF'),
                    data=pdf_buffer,
                    file_name="semantic_analysis.pdf",
                    mime="application/pdf",
                    key=f"semantic_download_{st.session_state.semantic_analysis_counter}"
                )
            except Exception as e:
                st.error(f"Error exporting analysis: {str(e)}")

        if new_analysis_button:
            # Clear state and bump the counter so counter-keyed widgets
            # (the file uploader above) are recreated empty on rerun.
            st.session_state.semantic_file_content = None
            st.session_state.semantic_analysis_done = False
            st.session_state.semantic_result = None
            st.session_state.semantic_analysis_counter += 1
            st.rerun()

        if not st.session_state.semantic_file_content and not st.session_state.semantic_analysis_done:
            st.info(semantic_t.get('initial_message', 'Upload a TXT file to begin analysis'))

    except Exception as e:
        logger.error(f"General error in the semantic interface: {str(e)}")
        st.error("An error occurred. Please try again.")
def display_semantic_results(result, lang_code, semantic_t):
    """
    Displays the semantic analysis results in tabs.
    """
    if result is None or not result.get('success'):
        st.warning(semantic_t.get('no_results', 'No results available'))
        return

    analysis = result['analysis']

    tab1, tab2 = st.tabs([
        semantic_t.get('concepts_tab', 'Key Concepts Analysis'),
        semantic_t.get('entities_tab', 'Entities Analysis')
    ])

    with tab1:
        col1, col2 = st.columns(2)

        with col1:
            st.subheader(semantic_t.get('key_concepts', 'Key Concepts'))
            # Join with a markdown hard break ("  \n") so each concept renders
            # on its own line; a bare "\n" would collapse into one paragraph.
            concept_text = "  \n".join([
                f"• {concept} ({frequency:.2f})"
                for concept, frequency in analysis['key_concepts']
            ])
            st.markdown(concept_text)

        with col2:
            st.subheader(semantic_t.get('concept_graph', 'Concepts Graph'))
            st.image(analysis['concept_graph'])

    with tab2:
        col1, col2 = st.columns(2)

        with col1:
            st.subheader(semantic_t.get('identified_entities', 'Identified Entities'))
            if 'entities' in analysis:
                for entity_type, entities in analysis['entities'].items():
                    st.markdown(f"**{entity_type}**")
                    st.markdown("• " + "  \n• ".join(entities))

        with col2:
            st.subheader(semantic_t.get('entity_graph', 'Entities Graph'))
            st.image(analysis['entity_graph'])

    # Centered export control; the download button only appears once the
    # PDF buffer has been generated by the export step.
    col1, col2, col3 = st.columns([2, 1, 2])
    with col2:
        if st.button(
            semantic_t.get('export_button', 'Export Analysis'),
            key=f"semantic_export_{st.session_state.semantic_analysis_counter}",
            use_container_width=True
        ):
            pdf_buffer = export_user_interactions(st.session_state.username, 'semantic')
            st.download_button(
                label=semantic_t.get('download_pdf', 'Download PDF'),
                data=pdf_buffer,
                file_name="semantic_analysis.pdf",
                mime="application/pdf",
                key=f"semantic_download_{st.session_state.semantic_analysis_counter}"
            )
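
# Expected `result` shape (a sketch inferred from the accesses above; the
# authoritative structure is produced in semantic_process):
#
#   {
#       'success': True,
#       'analysis': {
#           'key_concepts': [('term', 0.87), ...],        # (concept, weight) pairs
#           'concept_graph': ...,                          # image accepted by st.image
#           'entities': {'LABEL': ['surface form', ...]},  # spaCy label -> mentions
#           'entity_graph': ...,                           # image accepted by st.image
#       },
#   }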