AIdeaText committed (verified)
Commit dd52ef3
1 Parent(s): c7b4af7

Update modules/semantic/semantic_interface.py

Files changed (1)
  1. modules/semantic/semantic_interface.py +115 -74
modules/semantic/semantic_interface.py CHANGED
@@ -17,7 +17,7 @@ from .semantic_process import (
 
 from ..utils.widget_utils import generate_unique_key
 from ..database.semantic_mongo_db import store_student_semantic_result
-from ..database.semantic_export import export_user_interactions
+from ..database.semantics_export import export_user_interactions
 
 import logging
 logger = logging.getLogger(__name__)
@@ -30,75 +30,96 @@ def display_semantic_interface(lang_code, nlp_models, semantic_t):
         nlp_models: Loaded spaCy models
         semantic_t: Dictionary of semantic translations
     """
-    # Initialize the input state
-    input_key = f"semantic_input_{lang_code}"
-    if input_key not in st.session_state:
-        st.session_state[input_key] = ""
-
-    # Initialize the analysis counter if it does not exist
-    if 'semantic_analysis_counter' not in st.session_state:
-        st.session_state.semantic_analysis_counter = 0
-
-    # Text input field
-    text_input = st.text_area(
-        semantic_t.get('text_input_label', 'Enter text to analyze'),
-        height=150,
-        placeholder=semantic_t.get('text_input_placeholder', 'Enter your text here...'),
-        value=st.session_state[input_key],
-        key=f"text_area_{lang_code}_{st.session_state.semantic_analysis_counter}"
-    )
-
-    # Option to upload a file
-    uploaded_file = st.file_uploader(
-        semantic_t.get('file_uploader', 'Or upload a text file'),
-        type=['txt'],
-        key=f"file_uploader_{lang_code}_{st.session_state.semantic_analysis_counter}"
-    )
-
-    if st.button(
-        semantic_t.get('analyze_button', 'Analyze text'),
-        key=f"analyze_button_{lang_code}_{st.session_state.semantic_analysis_counter}"
-    ):
-        if text_input or uploaded_file is not None:
-            try:
-                with st.spinner(semantic_t.get('processing', 'Processing...')):
-                    # Get the text to analyze
-                    text_content = uploaded_file.getvalue().decode('utf-8') if uploaded_file else text_input
-
-                    # Run the analysis
-                    analysis_result = process_semantic_input(
-                        text_content,
-                        lang_code,
-                        nlp_models,
-                        semantic_t
-                    )
-
-                    # Store the result in session state
-                    st.session_state.semantic_result = analysis_result
-                    st.session_state.semantic_analysis_counter += 1
-
-                    # Show the results
-                    display_semantic_results(
-                        st.session_state.semantic_result,
-                        lang_code,
-                        semantic_t
-                    )
-
-            except Exception as e:
-                logger.error(f"Error en análisis semántico: {str(e)}")
-                st.error(semantic_t.get('error_processing', f'Error processing text: {str(e)}'))
-        else:
-            st.warning(semantic_t.get('warning_message', 'Please enter text or upload a file'))
+    try:
+        # Initialize the input state
+        input_key = f"semantic_input_{lang_code}"
+        if input_key not in st.session_state:
+            st.session_state[input_key] = ""
 
-    # If the button was not pressed, check for previous results
-    elif 'semantic_result' in st.session_state and st.session_state.semantic_result is not None:
-        display_semantic_results(
-            st.session_state.semantic_result,
-            lang_code,
-            semantic_t
+        # Initialize the analysis counter if it does not exist
+        if 'semantic_analysis_counter' not in st.session_state:
+            st.session_state.semantic_analysis_counter = 0
+
+        # Text input field
+        text_input = st.text_area(
+            semantic_t.get('text_input_label', 'Enter text to analyze'),
+            height=150,
+            placeholder=semantic_t.get('text_input_placeholder', 'Enter your text here...'),
+            value=st.session_state[input_key],
+            key=generate_unique_key("semantic", "text_area")
         )
-    else:
-        st.info(semantic_t.get('initial_message', 'Enter text to begin analysis'))
+
+        # Option to upload a file
+        uploaded_file = st.file_uploader(
+            semantic_t.get('file_uploader', 'Or upload a text file'),
+            type=['txt'],
+            key=generate_unique_key("semantic", "file_uploader")
+        )
+
+        # Analysis button
+        analyze_button = st.button(
+            semantic_t.get('analyze_button', 'Analyze text'),
+            key=generate_unique_key("semantic", "analyze_button")
+        )
+
+        if analyze_button:
+            if text_input or uploaded_file is not None:
+                try:
+                    with st.spinner(semantic_t.get('processing', 'Processing...')):
+                        # Get the text to analyze
+                        text_content = uploaded_file.getvalue().decode('utf-8') if uploaded_file else text_input
+
+                        # Run the analysis
+                        analysis_result = process_semantic_input(
+                            text_content,
+                            lang_code,
+                            nlp_models,
+                            semantic_t
+                        )
+
+                        if analysis_result['success']:
+                            # Store the result in session state
+                            st.session_state.semantic_result = analysis_result
+                            st.session_state.semantic_analysis_counter += 1
+
+                            # Show the results
+                            display_semantic_results(
+                                analysis_result,
+                                lang_code,
+                                semantic_t
+                            )
+
+                            # Save to the database
+                            if store_student_semantic_result(
+                                st.session_state.username,
+                                text_content,
+                                analysis_result['analysis']
+                            ):
+                                st.success(semantic_t.get('success_message', 'Analysis saved successfully'))
+                            else:
+                                st.error(semantic_t.get('error_message', 'Error saving analysis'))
+                        else:
+                            st.error(analysis_result['message'])
+
+                except Exception as e:
+                    logger.error(f"Error en análisis semántico: {str(e)}")
+                    st.error(semantic_t.get('error_processing', f'Error processing text: {str(e)}'))
+            else:
+                st.warning(semantic_t.get('warning_message', 'Please enter text or upload a file'))
+
+        # If the button was not pressed, check for previous results
+        elif 'semantic_result' in st.session_state and st.session_state.semantic_result is not None:
+            display_semantic_results(
+                st.session_state.semantic_result,
+                lang_code,
+                semantic_t
+            )
+        else:
+            st.info(semantic_t.get('initial_message', 'Enter text to begin analysis'))
+
+    except Exception as e:
+        logger.error(f"Error general en interfaz semántica: {str(e)}")
+        st.error("Se produjo un error. Por favor, intente de nuevo.")
 
 def display_semantic_results(result, lang_code, semantic_t):
     """
@@ -115,7 +136,11 @@ def display_semantic_results(result, lang_code, semantic_t):
     analysis = result['analysis']
 
     # Show key concepts
-    with st.expander(semantic_t.get('key_concepts', 'Key Concepts'), expanded=True):
+    with st.expander(
+        semantic_t.get('key_concepts', 'Key Concepts'),
+        expanded=True,
+        key=generate_unique_key("semantic", "key_concepts_expander")
+    ):
         concept_text = " | ".join([
             f"{concept} ({frequency:.2f})"
             for concept, frequency in analysis['key_concepts']
@@ -123,26 +148,42 @@
     st.write(concept_text)
 
     # Show the conceptual relations graph
-    with st.expander(semantic_t.get('conceptual_relations', 'Conceptual Relations'), expanded=True):
+    with st.expander(
+        semantic_t.get('conceptual_relations', 'Conceptual Relations'),
+        expanded=True,
+        key=generate_unique_key("semantic", "concept_graph_expander")
+    ):
        st.image(analysis['concept_graph'])
 
     # Show the entity graph
-    with st.expander(semantic_t.get('entity_relations', 'Entity Relations'), expanded=True):
+    with st.expander(
+        semantic_t.get('entity_relations', 'Entity Relations'),
+        expanded=True,
+        key=generate_unique_key("semantic", "entity_graph_expander")
+    ):
        st.image(analysis['entity_graph'])
 
     # Show identified entities
     if 'entities' in analysis:
-        with st.expander(semantic_t.get('identified_entities', 'Identified Entities'), expanded=True):
+        with st.expander(
+            semantic_t.get('identified_entities', 'Identified Entities'),
+            expanded=True,
+            key=generate_unique_key("semantic", "entities_expander")
+        ):
            for entity_type, entities in analysis['entities'].items():
                st.subheader(entity_type)
                st.write(", ".join(entities))
 
     # Export button
-    if st.button(semantic_t.get('export_button', 'Export Analysis')):
+    if st.button(
+        semantic_t.get('export_button', 'Export Analysis'),
+        key=generate_unique_key("semantic", "export_button")
+    ):
        pdf_buffer = export_user_interactions(st.session_state.username, 'semantic')
        st.download_button(
            label=semantic_t.get('download_pdf', 'Download PDF'),
            data=pdf_buffer,
            file_name="semantic_analysis.pdf",
-            mime="application/pdf"
+            mime="application/pdf",
+            key=generate_unique_key("semantic", "download_button")
        )
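
Every widget key in the new version goes through generate_unique_key from modules/utils/widget_utils.py, whose implementation is not part of this commit. As a rough sketch only (the real helper may differ), a deterministic version that would satisfy the call sites above could look like this:

def generate_unique_key(*parts: str) -> str:
    """Join the caller-supplied parts into a Streamlit widget key.

    Streamlit keys must be unique per widget but stable across reruns,
    so this sketch simply joins the parts (e.g. 'semantic_text_area')
    instead of appending a per-rerun counter.
    """
    return "_".join(str(part) for part in parts)

# Example call sites, matching the diff above:
# st.text_area(..., key=generate_unique_key("semantic", "text_area"))
# st.download_button(..., key=generate_unique_key("semantic", "download_button"))

Unlike the old f-string keys, which embedded semantic_analysis_counter and therefore changed after every completed analysis, keys built this way stay constant across reruns, so Streamlit preserves the widgets' state.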