import logging
import io
import base64
import matplotlib.pyplot as plt
from ..text_analysis.semantic_analysis import perform_semantic_analysis
from .flexible_analysis_handler import FlexibleAnalysisHandler
logger = logging.getLogger(__name__)
def encode_image_to_base64(image_data):
    """Encode an image (file path or raw bytes) as a base64 string."""
    if isinstance(image_data, str):  # If it is a file path
        with open(image_data, "rb") as image_file:
            encoded_string = base64.b64encode(image_file.read()).decode("utf-8")
    elif isinstance(image_data, bytes):  # If it is in-memory image data
        encoded_string = base64.b64encode(image_data).decode("utf-8")
    else:
        raise ValueError("Invalid image data type. Expected string (file path) or bytes.")
    return encoded_string
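
# NOTE: process_semantic_analysis below calls fig_to_base64, which is not defined
# in this file. The sketch below is a minimal assumption-based implementation: it
# assumes the concept/entity graphs returned by perform_semantic_analysis are
# matplotlib Figure objects. Replace it with the package's actual helper if one exists.
def fig_to_base64(fig):
    """Serialize a matplotlib figure to a base64-encoded PNG string."""
    buf = io.BytesIO()
    fig.savefig(buf, format="png", bbox_inches="tight")
    buf.seek(0)
    encoded = base64.b64encode(buf.getvalue()).decode("utf-8")
    plt.close(fig)  # release the figure to avoid accumulating open figures
    return encoded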
def process_semantic_analysis(file_contents, nlp_model, lang_code):
    logger.info(f"Starting semantic analysis processing for language: {lang_code}")
    try:
        result = perform_semantic_analysis(file_contents, nlp_model, lang_code)

        # handler = FlexibleAnalysisHandler(result)
        # concept_graph = handler.get_graph('concept_graph')
        # entity_graph = handler.get_graph('entity_graph')
        # key_concepts = handler.get_key_concepts()

        concept_graph = result['concept_graph']
        entity_graph = result['entity_graph']
        key_concepts = result['key_concepts']

        # Convert the graphs to base64
        concept_graph_base64 = fig_to_base64(concept_graph) if concept_graph else None
        entity_graph_base64 = fig_to_base64(entity_graph) if entity_graph else None

        logger.info("Semantic analysis processing completed successfully")
        return concept_graph_base64, entity_graph_base64, key_concepts
    except Exception as e:
        logger.error(f"Error in semantic analysis processing: {str(e)}")
        return None, None, []  # Return empty values on error
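
# Usage sketch (assumptions: the caller passes the document text and a loaded spaCy
# model; the model name and language code below are illustrative, not taken from
# this file):
#
#   import spacy
#   nlp = spacy.load("es_core_news_sm")
#   concept_b64, entity_b64, key_concepts = process_semantic_analysis(text, nlp, "es")
#   if concept_b64:
#       html_img = f'<img src="data:image/png;base64,{concept_b64}"/>'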
# Earlier, more verbose debug version kept for reference:
'''
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)

def process_semantic_analysis(file_contents, nlp_model, lang_code):
    logger.info(f"Starting semantic analysis for language: {lang_code}")
    try:
        logger.debug("Calling perform_semantic_analysis")
        result = perform_semantic_analysis(file_contents, nlp_model, lang_code)
        logger.debug(f"Result keys: {result.keys()}")
        logger.debug(f"Type of concept_graph: {type(result['concept_graph'])}")
        logger.debug(f"Type of entity_graph: {type(result['entity_graph'])}")
        logger.debug(f"Number of key_concepts: {len(result['key_concepts'])}")
        logger.info("Semantic analysis completed successfully")
        return result['concept_graph'], result['entity_graph'], result['key_concepts']
    except Exception as e:
        logger.error(f"Error in semantic analysis: {str(e)}")
        raise
'''