import json

import streamlit as st
from google.cloud import language_v1
from google.oauth2 import service_account
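
# NOTE: the function below reads st.secrets["google_nlp"]["credentials"], so the app
# expects a Streamlit secret holding the full service-account JSON as a string.
# A minimal sketch of .streamlit/secrets.toml (or the Space's secrets settings),
# with hypothetical placeholder values:
#
# [google_nlp]
# credentials = '''
# {
#   "type": "service_account",
#   "project_id": "my-project",
#   "private_key_id": "...",
#   "private_key": "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n",
#   "client_email": "nlp-sa@my-project.iam.gserviceaccount.com"
# }
# '''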
def sample_analyze_entities(text_content):
    # Read the service-account JSON credentials from st.secrets
    service_account_info = json.loads(st.secrets["google_nlp"]["credentials"])

    # Build credentials from the service-account info
    credentials = service_account.Credentials.from_service_account_info(
        service_account_info, scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )

    # Initialize the LanguageServiceClient with those credentials
    client = language_v1.LanguageServiceClient(credentials=credentials)

    # Build the plain-text document to analyze
    type_ = language_v1.Document.Type.PLAIN_TEXT
    language = "en"
    document = {"content": text_content, "type_": type_, "language": language}
    encoding_type = language_v1.EncodingType.UTF8

    # Call the Natural Language API for entity analysis
    response = client.analyze_entities(
        request={"document": document, "encoding_type": encoding_type}
    )

    # Display each entity with its type, salience, metadata, and mentions
    for entity in response.entities:
        st.write(f"Representative name for the entity: {entity.name}")
        st.write(f"Entity type: {language_v1.Entity.Type(entity.type_).name}")
        st.write(f"Salience score: {entity.salience}")
        for metadata_name, metadata_value in entity.metadata.items():
            st.write(f"{metadata_name}: {metadata_value}")
        for mention in entity.mentions:
            st.write(f"Mention text: {mention.text.content}")
            st.write(f"Mention type: {language_v1.EntityMention.Type(mention.type_).name}")

    st.write(f"Language of the text: {response.language}")
# Streamlit UI
st.title('Google Cloud NLP Entity Analyzer')
user_input = st.text_area('Enter text to analyze', '')

if st.button('Analyze'):
    if user_input:
        sample_analyze_entities(user_input)
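
# To try the app locally (a rough sketch, assuming the file is saved as app.py and
# the [google_nlp] secret shown above is present in .streamlit/secrets.toml):
#   pip install streamlit google-cloud-language
#   streamlit run app.py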