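"""Streamlit app that runs Google Cloud Natural Language entity analysis on user text.

Reads a service-account key from Streamlit secrets, calls the Cloud Natural
Language API's analyze_entities endpoint, and writes each entity's name, type,
salience, metadata, and mentions back to the page.

Run locally with: streamlit run app.py
"""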
import json
from google.oauth2 import service_account
from google.cloud import language_v1
import streamlit as st
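
# NOTE (assumption): st.secrets["google_nlp"] is expected to hold the full
# service-account key JSON as a single string, e.g. in .streamlit/secrets.toml:
#   google_nlp = '''{"type": "service_account", "project_id": "...", ...}'''
# If the secret is stored as a TOML table instead, st.secrets["google_nlp"]
# is already a dict and the json.loads() call below would need to be dropped.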
def sample_analyze_entities(text_content):
    # Parse the JSON string from Streamlit secrets into a dictionary
    service_account_info = json.loads(st.secrets["google_nlp"])

    # Create credentials from the service-account info
    credentials = service_account.Credentials.from_service_account_info(
        service_account_info, scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )

    # Debug: show the type and content of the credentials object
    st.write(f"Debug - Type of credentials: {type(credentials)}")
    st.write(f"Debug - Content of credentials: {credentials}")

    # Initialize the LanguageServiceClient with the credentials
    client = language_v1.LanguageServiceClient(credentials=credentials)

    # Build the plain-text document and run entity analysis
    type_ = language_v1.Document.Type.PLAIN_TEXT
    language = "en"
    document = {"content": text_content, "type_": type_, "language": language}
    encoding_type = language_v1.EncodingType.UTF8
    response = client.analyze_entities(request={"document": document, "encoding_type": encoding_type})

    # Report each detected entity, its metadata, and its mentions
    for entity in response.entities:
        st.write(f"Representative name for the entity: {entity.name}")
        st.write(f"Entity type: {language_v1.Entity.Type(entity.type_).name}")
        st.write(f"Salience score: {entity.salience}")
        for metadata_name, metadata_value in entity.metadata.items():
            st.write(f"{metadata_name}: {metadata_value}")
        for mention in entity.mentions:
            st.write(f"Mention text: {mention.text.content}")
            st.write(f"Mention type: {language_v1.EntityMention.Type(mention.type_).name}")

    st.write(f"Language of the text: {response.language}")
# Streamlit UI
st.title('Google Cloud NLP Entity Analyzer')
user_input = st.text_area('Enter text to analyze', '')
if st.button('Analyze'):
    if user_input:
        sample_analyze_entities(user_input)