import streamlit as st
from google.oauth2 import service_account
from google.cloud import language_v1
import json


def sample_analyze_entities(text_content):
    # Read the service account credentials from st.secrets
    service_account_info = st.secrets["google_nlp"]["credentials"]

    # If the credentials are stored as a JSON string rather than a dict, parse them
    if not isinstance(service_account_info, dict):
        service_account_info = json.loads(service_account_info)

    # Create credentials from the service account info
    credentials = service_account.Credentials.from_service_account_info(
        service_account_info,
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )

    # Initialize the LanguageServiceClient with those credentials
    # (initializing it a second time without credentials would discard them)
    client = language_v1.LanguageServiceClient(credentials=credentials)

    # Build the plain-text document to analyze
    type_ = language_v1.Document.Type.PLAIN_TEXT
    language = "en"
    document = {"content": text_content, "type_": type_, "language": language}
    encoding_type = language_v1.EncodingType.UTF8

    response = client.analyze_entities(
        request={"document": document, "encoding_type": encoding_type}
    )

    # Report each detected entity, its metadata, and its mentions
    for entity in response.entities:
        st.write(f"Representative name for the entity: {entity.name}")
        st.write(f"Entity type: {language_v1.Entity.Type(entity.type_).name}")
        st.write(f"Salience score: {entity.salience}")
        for metadata_name, metadata_value in entity.metadata.items():
            st.write(f"{metadata_name}: {metadata_value}")
        for mention in entity.mentions:
            st.write(f"Mention text: {mention.text.content}")
            st.write(f"Mention type: {language_v1.EntityMention.Type(mention.type_).name}")

    st.write(f"Language of the text: {response.language}")


# Streamlit UI
st.title('Google Cloud NLP Entity Analyzer')
user_input = st.text_area('Enter text to analyze', '')
if st.button('Analyze'):
    if user_input:
        sample_analyze_entities(user_input)
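
# A minimal sketch of the .streamlit/secrets.toml layout this script assumes.
# The "google_nlp" table and "credentials" key match the st.secrets lookups
# above; the JSON field values below are placeholders, not real credentials,
# and should come from a service account key downloaded from Google Cloud:
#
# [google_nlp]
# credentials = '''
# {
#   "type": "service_account",
#   "project_id": "your-project-id",
#   "private_key_id": "...",
#   "private_key": "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n",
#   "client_email": "your-sa@your-project-id.iam.gserviceaccount.com",
#   "token_uri": "https://oauth2.googleapis.com/token"
# }
# '''
#
# Because the value is a TOML string, st.secrets returns it as a str and the
# json.loads branch above handles the parsing.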