import json
import streamlit as st
from google.oauth2 import service_account
from google.cloud import language_v1
# Multiselect filter for entity types (an empty selection shows all types)
entity_types_to_show = [
"UNKNOWN", "PERSON", "LOCATION", "ORGANIZATION", "EVENT", "WORK_OF_ART", "CONSUMER_GOOD", "OTHER"
]
selected_types = st.multiselect('Select entity types to show:', entity_types_to_show)
# Header and intro
st.title("Google Cloud NLP Entity Analyzer")
st.write("## Introduction to the Knowledge Graph API")
st.write("---")
st.write("""
The Google Knowledge Graph API reveals entity information related to a keyword, that Google knows about.
This information can be very useful for SEO – discovering related topics and what Google believes is relevant.
It can also help when trying to claim/win a Knowledge Graph box on search results.
The API requires a high level of technical understanding, so this tool creates a simple public interface, with the ability to export data into spreadsheets.
""")
def sample_analyze_entities(text_content, your_query=""):
    """Analyze entities in text_content with the Cloud NLP API and render the results."""
    # Parse the service-account JSON string from Streamlit secrets into a dict
    service_account_info = json.loads(st.secrets["google_nlp"])

    # Create scoped credentials from the service-account info
    credentials = service_account.Credentials.from_service_account_info(
        service_account_info, scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )
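    # Note: this assumes the secret is stored as a single JSON string, e.g. in
    # .streamlit/secrets.toml (illustrative value, matching the key used above):
    #   google_nlp = '''{"type": "service_account", "project_id": "...", ...}'''
    # If the secret were stored as a TOML table instead, json.loads would not be
    # needed; dict(st.secrets["google_nlp"]) would yield the same mapping.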
    # Initialize the LanguageServiceClient with the credentials
    client = language_v1.LanguageServiceClient(credentials=credentials)

    # Build the request document and run entity analysis
    type_ = language_v1.Document.Type.PLAIN_TEXT
    language = "en"
    document = {"content": text_content, "type_": type_, "language": language}
    encoding_type = language_v1.EncodingType.UTF8
    response = client.analyze_entities(request={"document": document, "encoding_type": encoding_type})
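    # Each entity in response.entities exposes name, type_, salience, metadata
    # (which may include a Knowledge Graph "mid" and "wikipedia_url"), and mentions.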
    # Collect the entities that pass the type filter chosen in the multiselect
    entities_list = []
    for entity in response.entities:
        entity_type_name = language_v1.Entity.Type(entity.type_).name
        if not selected_types or entity_type_name in selected_types:
            entity_details = {
                "Name": entity.name,
                "Type": entity_type_name,
                "Salience Score": entity.salience,
                "Metadata": dict(entity.metadata),  # convert proto map to a plain dict for st.json
                "Mentions": [mention.text.content for mention in entity.mentions]
            }
            entities_list.append(entity_details)
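    # Salience is a score in [0, 1] for how central an entity is to the whole
    # text. To list the most important entities first, you could sort here:
    #   entities_list.sort(key=lambda e: e["Salience Score"], reverse=True)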
    if your_query:
        st.write(f"We found {len(entities_list)} results for your query of **{your_query}**")
    else:
        st.write(f"We found {len(entities_list)} results for your query")

    for entity in entities_list:
        st.write(f"**Name**: {entity['Name']}")
        st.write(f"**Type**: {entity['Type']}")
        st.write(f"**Salience Score**: {entity['Salience Score']}")
        if entity["Metadata"]:
            st.write("**Metadata**: ")
            st.json(entity["Metadata"])
        if entity["Mentions"]:
            st.write("**Mentions**: ")
            st.json(entity["Mentions"])
# User input for text analysis
user_input = st.text_area("Enter text to analyze")
your_query = st.text_input("Enter your query (optional)")

if st.button("Analyze"):
    if user_input.strip():
        sample_analyze_entities(user_input, your_query)
    else:
        st.warning("Please enter some text to analyze.")
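# To run locally (assuming this file is saved as app.py and the secret above is configured):
#   streamlit run app.py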