blazingbunny's picture
Update app.py
470e7a0
raw
history blame
3.33 kB
import json
import streamlit as st
from google.oauth2 import service_account
from google.cloud import language_v1
# --- Page header and introductory copy ---
# The intro text is kept in a named constant so the layout code below stays short.
INTRO_TEXT = """
The Google Knowledge Graph API reveals entity information related to a keyword, that Google knows about.
This information can be very useful for SEO – discovering related topics and what Google believes is relevant.
It can also help when trying to claim/win a Knowledge Graph box on search results.
The API requires a high level of technical understanding, so this tool creates a simple public interface, with the ability to export data into spreadsheets.
"""

st.title("Google Cloud NLP Entity Analyzer")
st.write("## Introduction to the Knowledge Graph API")
st.write("---")
st.write(INTRO_TEXT)
def _get_language_client():
    """Build a LanguageServiceClient from the service-account JSON stored in Streamlit secrets.

    Expects ``st.secrets["google_nlp"]`` to hold a JSON *string* of the
    service-account key (it is passed through ``json.loads``) — TODO confirm
    the secret is stored as a string and not a TOML table.
    """
    service_account_info = json.loads(st.secrets["google_nlp"])
    credentials = service_account.Credentials.from_service_account_info(
        service_account_info, scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )
    return language_v1.LanguageServiceClient(credentials=credentials)


def _entity_to_dict(entity):
    """Convert one API ``Entity`` proto into a plain dict for display.

    Enum fields are mapped to their names (e.g. ``PERSON``); metadata is kept
    as a list of single-pair dicts and mentions as a list of text/type dicts,
    matching the shape the rendering code expects.
    """
    return {
        "Name": entity.name,
        "Type": language_v1.Entity.Type(entity.type_).name,
        "Salience Score": entity.salience,
        "Metadata": [{name: value} for name, value in entity.metadata.items()],
        "Mentions": [
            {
                "Text": mention.text.content,
                "Type": language_v1.EntityMention.Type(mention.type_).name,
            }
            for mention in entity.mentions
        ],
    }


def sample_analyze_entities(text_content, your_query=""):
    """Run Google Cloud NLP entity analysis on *text_content* and render the results.

    Args:
        text_content: Plain text (assumed English) to analyze.
        your_query: Optional label echoed back in the results header.

    Side effects:
        Writes the per-entity results and the detected language to the
        Streamlit page; returns nothing.
    """
    client = _get_language_client()

    # The document is always treated as English plain text.
    document = {
        "content": text_content,
        "type_": language_v1.Document.Type.PLAIN_TEXT,
        "language": "en",
    }
    response = client.analyze_entities(
        request={"document": document, "encoding_type": language_v1.EncodingType.UTF8}
    )

    entities_list = [_entity_to_dict(entity) for entity in response.entities]

    # --- Streamlit UI ---
    if your_query:
        st.write(f"### We found {len(entities_list)} results for your query of **{your_query}**")
    else:
        st.write("### We found results for your query")
    st.write("----")

    for i, entity in enumerate(entities_list):
        st.write(f"Relevance Score: {entity.get('Salience Score', 'N/A')} \t {i+1} of {len(entities_list)}")
        # Display every non-empty field of the entity dictionary.
        for key, value in entity.items():
            if value:
                st.write(f"**{key}:**")
                st.json(value)
        st.write("----")

    st.write(f"### Language of the text: {response.language}")
# --- User input widgets and analysis trigger ---
user_input = st.text_area("Enter text to analyze")
your_query = st.text_input("Enter your query (optional)")

# Run the analysis only when the button is pressed.
analyze_clicked = st.button("Analyze")
if analyze_clicked:
    sample_analyze_entities(user_input, your_query)