blazingbunny committed on
Commit
f66f708
·
1 Parent(s): dc5c795

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -34
app.py CHANGED
@@ -2,52 +2,49 @@ import json
2
  import streamlit as st
3
  from google.oauth2 import service_account
4
  from google.cloud import language_v1
5
- import requests
6
 
7
- # Function for querying Google Knowledge Graph API
8
# Function for querying Google Knowledge Graph API
def query_google_knowledge_graph(api_key, entity_name):
    """Look up *entity_name* in the Google Knowledge Graph Search API.

    Args:
        api_key: Google API key authorized for the Knowledge Graph Search API.
            (NOTE(review): the caller passes the parsed service-account JSON
            here, but this endpoint expects a plain API-key string — confirm.)
        entity_name: Entity to search for; only the single best match is
            requested (``limit=1``).

    Returns:
        dict: The decoded JSON response from the API.

    Raises:
        requests.HTTPError: If the API responds with a 4xx/5xx status.
        requests.Timeout: If the API does not answer within the timeout.
    """
    service_url = "https://kgsearch.googleapis.com/v1/entities:search"
    params = {
        'query': entity_name,
        'limit': 1,
        'indent': True,
        'key': api_key,
    }
    # Time-bound the request so a stalled API cannot hang the Streamlit app,
    # and surface HTTP errors instead of returning an error payload as data.
    response = requests.get(service_url, params=params, timeout=10)
    response.raise_for_status()
    return response.json()
19
 
20
  # Header and intro
21
  st.title("Google Cloud NLP Entity Analyzer")
22
  st.write("## Introduction to the Knowledge Graph API")
23
  st.write("---")
24
- # ... (your intro text here)
 
 
 
 
 
25
 
26
  def sample_analyze_entities(text_content, your_query=""):
27
- api_key = json.loads(st.secrets["google_nlp"]) # The key is the same for both APIs
28
  credentials = service_account.Credentials.from_service_account_info(
29
- api_key, scopes=["https://www.googleapis.com/auth/cloud-platform"]
30
  )
31
  client = language_v1.LanguageServiceClient(credentials=credentials)
32
-
33
  type_ = language_v1.Document.Type.PLAIN_TEXT
34
  language = "en"
35
  document = {"content": text_content, "type_": type_, "language": language}
36
  encoding_type = language_v1.EncodingType.UTF8
37
- response = client.analyze_entities(request={"document": document, "encoding_type": encoding_type})
38
 
 
39
 
40
- # ... (rest of your NLP code)
41
  entities_list = []
42
  for entity in response.entities:
43
- entity_details = {
44
- "Name": entity.name,
45
- "Type": language_v1.Entity.Type(entity.type_).name,
46
- "Salience Score": entity.salience,
47
- "Metadata": entity.metadata,
48
- "Mentions": [mention.text.content for mention in entity.mentions]
49
- }
50
- entities_list.append(entity_details)
 
 
51
 
52
  if your_query:
53
  st.write(f"### We found {len(entities_list)} results for your query of **{your_query}**")
@@ -56,15 +53,18 @@ def sample_analyze_entities(text_content, your_query=""):
56
 
57
  st.write("----")
58
  for i, entity in enumerate(entities_list):
59
- # ... (your existing entity display code)
60
-
61
- # Query Google Knowledge Graph API for each entity
62
- kg_info = query_google_knowledge_graph(api_key, entity['Name'])
63
- st.write("### Google Knowledge Graph Information")
64
- st.json(kg_info) # Display the JSON response
65
-
 
66
  st.write("----")
67
 
 
 
68
  # User input for text analysis
69
  user_input = st.text_area("Enter text to analyze")
70
  your_query = st.text_input("Enter your query (optional)")
 
2
  import streamlit as st
3
  from google.oauth2 import service_account
4
  from google.cloud import language_v1
 
5
 
6
# Let the user narrow the report to particular NLP entity categories.
# An empty selection means "show everything" (handled by the analyzer).
entity_types_to_show = [
    "UNKNOWN",
    "PERSON",
    "LOCATION",
    "ORGANIZATION",
    "EVENT",
    "WORK_OF_ART",
    "CONSUMER_GOOD",
    "OTHER",
]
selected_types = st.multiselect('Select entity types to show:', entity_types_to_show)
 
 
 
 
 
 
 
11
 
12
# Page header followed by a short explanation of what the tool is for.
_INTRO_TEXT = """
The Google Knowledge Graph API reveals entity information related to a keyword, that Google knows about.
This information can be very useful for SEO – discovering related topics and what Google believes is relevant.
It can also help when trying to claim/win a Knowledge Graph box on search results.
The API requires a high level of technical understanding, so this tool creates a simple public interface, with the ability to export data into spreadsheets.
"""

st.title("Google Cloud NLP Entity Analyzer")
st.write("## Introduction to the Knowledge Graph API")
st.write("---")
st.write(_INTRO_TEXT)
22
 
23
  def sample_analyze_entities(text_content, your_query=""):
24
+ service_account_info = json.loads(st.secrets["google_nlp"])
25
  credentials = service_account.Credentials.from_service_account_info(
26
+ service_account_info, scopes=["https://www.googleapis.com/auth/cloud-platform"]
27
  )
28
  client = language_v1.LanguageServiceClient(credentials=credentials)
 
29
  type_ = language_v1.Document.Type.PLAIN_TEXT
30
  language = "en"
31
  document = {"content": text_content, "type_": type_, "language": language}
32
  encoding_type = language_v1.EncodingType.UTF8
 
33
 
34
+ response = client.analyze_entities(request={"document": document, "encoding_type": encoding_type})
35
 
 
36
  entities_list = []
37
  for entity in response.entities:
38
+ entity_type_name = language_v1.Entity.Type(entity.type_).name
39
+ if not selected_types or entity_type_name in selected_types:
40
+ entity_details = {
41
+ "Name": entity.name,
42
+ "Type": entity_type_name,
43
+ "Salience Score": entity.salience,
44
+ "Metadata": entity.metadata,
45
+ "Mentions": [mention.text.content for mention in entity.mentions]
46
+ }
47
+ entities_list.append(entity_details)
48
 
49
  if your_query:
50
  st.write(f"### We found {len(entities_list)} results for your query of **{your_query}**")
 
53
 
54
  st.write("----")
55
  for i, entity in enumerate(entities_list):
56
+ st.write(f"Relevance Score: {entity.get('Salience Score', 'N/A')} \t {i+1} of {len(entities_list)}")
57
+ for key, value in entity.items():
58
+ if value:
59
+ st.write(f"**{key}:**")
60
+ try:
61
+ st.json(value)
62
+ except Exception as e:
63
+ st.write(f"Error while displaying JSON: {e}")
64
  st.write("----")
65
 
66
+ st.write(f"### Language of the text: {response.language}")
67
+
68
# User input for text analysis: the free text that will be sent to the
# Cloud NLP entity analyzer, plus an optional query string that is only
# used to label the results header (see sample_analyze_entities).
user_input = st.text_area("Enter text to analyze")
your_query = st.text_input("Enter your query (optional)")