blazingbunny committed on
Commit
170f624
·
1 Parent(s): e054f20

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -52
app.py CHANGED
# Sidebar content
st.sidebar.title("About This Tool")
st.sidebar.markdown("### Descriptive Introduction")
# BUG FIX: the original next line was a bare `st.sidebar.markdown` with no
# call — a no-op attribute access that rendered nothing.  The dangling
# reference is removed entirely.
st.sidebar.markdown("### Step-by-Step Guide")
st.sidebar.markdown("""
1. **Open the Tool**: Navigate to the URL where the tool is hosted.
2. **User Input**:
   - **Text to Analyze**: In the text area labeled "Enter text to analyze", paste or type the text you want to analyze.
   - **Query**: Optionally, you can also enter a specific query in the text input field labeled "Enter your query (optional)". This is for your own reference.
   - **Analyze**: Click the button labeled "Analyze". The tool will then process the text and perform entity analysis on it.
3. **View Results**:
   - After the analysis is complete, you'll see a section that says, "We found X results for your query of your_query" (or just "We found results for your query" if no query was entered).
   - Below this, you'll find a line-by-line breakdown of each entity identified in the text.
""")

# Header and intro
st.title("Google Cloud NLP Entity Analyzer")
st.write("""The "Google Cloud NLP Entity Analyzer" is a powerful tool designed to analyze text and identify various types of entities such as people, locations, organizations, and events. Leveraging Google's Natural Language Processing (NLP) technology, this tool provides insights into how Google understands text, which can be particularly useful for Search Engine Optimization (SEO) efforts. It also serves as an interface to the Google Knowledge Graph API, providing additional contextual information about the identified entities.""")
def sample_analyze_entities(text_content, your_query=""):
    """Run Google Cloud NLP entity analysis on *text_content* and render
    the results in the Streamlit UI.

    Parameters
    ----------
    text_content : str
        Raw text to analyze.
    your_query : str, optional
        User-supplied label echoed back in the results header; purely
        informational.
    """
    # Build an authenticated client from the service-account JSON kept in
    # Streamlit secrets under the "google_nlp" key.
    creds = service_account.Credentials.from_service_account_info(
        json.loads(st.secrets["google_nlp"]),
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    client = language_v1.LanguageServiceClient(credentials=creds)

    document = {
        "content": text_content,
        "type_": language_v1.Document.Type.PLAIN_TEXT,
        "language": "en",
    }
    response = client.analyze_entities(
        request={"document": document, "encoding_type": language_v1.EncodingType.UTF8}
    )

    # Flatten the proto entities into plain dicts for display.
    entities_list = [
        {
            "Name": ent.name,
            "Type": language_v1.Entity.Type(ent.type_).name,
            "Salience Score": ent.salience,
            "Metadata": ent.metadata,
            "Mentions": [m.text.content for m in ent.mentions],
        }
        for ent in response.entities
    ]

    header = (
        f"### We found {len(entities_list)} results for your query of **{your_query}**"
        if your_query
        else "### We found results for your query"
    )
    st.write(header)

    st.write("----")
    total = len(entities_list)
    for idx, details in enumerate(entities_list):
        st.write(f"Entity {idx+1} of {total}")
        # Salience is a 0..1 float; shown here as a whole-number percentage.
        st.write(f"Relevance Score: {round(details.get('Salience Score', 0) * 100)}%")
        st.write(f"Name: {details.get('Name', 'N/A')}")
        st.write(f"Type: {details.get('Type', 'N/A')}")
        st.write(f"Salience Score: {details.get('Salience Score', 'N/A')}")

        if details.get('Metadata', {}):
            st.write("Metadata:")
            st.write(details.get('Metadata', {}))

        if details.get('Mentions', []):
            st.write("Mentions:")
            st.write(json.dumps(details.get('Mentions', [])))

        st.write("----")

    st.write(f"### Language of the text: {response.language}")
# User input for text analysis
user_input = st.text_area("Enter text to analyze")
# (removed commented-out dead code: an unused optional-query input)

if st.button("Analyze"):
    # Guard against an empty submission: analyzing "" would waste an API
    # call and render an empty report; tell the user why nothing happened.
    if user_input:
        sample_analyze_entities(user_input)
    else:
        st.warning("Please enter some text to analyze.")
 
 
# ---- Sidebar -----------------------------------------------------------
SIDEBAR_INTRO = "This tool leverages Google's NLP technology for entity analysis."
SIDEBAR_GUIDE = """
1. **Open the Tool**: Navigate to the URL where the tool is hosted.
2. **User Input**: Enter the text you want to analyze.
3. **Analyze**: Click the 'Analyze' button.
4. **View Results**: See the identified entities and their details.
"""

st.sidebar.title("About This Tool")
st.sidebar.markdown(SIDEBAR_INTRO)
st.sidebar.markdown("### Step-by-Step Guide")
st.sidebar.markdown(SIDEBAR_GUIDE)

# ---- Header and intro --------------------------------------------------
st.title("Google Cloud NLP Entity Analyzer")
st.write(
    "This tool analyzes text to identify entities such as people, "
    "locations, organizations, and events."
)
def _render_entity(index, total, entity):
    """Write one entity's details (name, type, salience, metadata,
    mentions) to the Streamlit page."""
    st.write(f"Entity {index+1} of {total}")
    st.write(f"Name: {entity.name}")
    st.write(f"Type: {language_v1.Entity.Type(entity.type_).name}")
    st.write(f"Salience Score: {entity.salience}")

    if entity.metadata:
        st.write("Metadata:")
        st.write(entity.metadata)

    if entity.mentions:
        st.write("Mentions:")
        st.write(', '.join([m.text.content for m in entity.mentions]))

    st.write("---")


def sample_analyze_entities(text_content):
    """Analyze *text_content* with the Google Cloud Natural Language API
    and render every detected entity in the Streamlit UI.

    The service-account JSON is read from Streamlit secrets under the
    `google_nlp` key.
    """
    creds = service_account.Credentials.from_service_account_info(
        json.loads(st.secrets["google_nlp"]),
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    client = language_v1.LanguageServiceClient(credentials=creds)

    request = {
        "document": {
            "content": text_content,
            "type_": language_v1.Document.Type.PLAIN_TEXT,
            "language": "en",
        },
        "encoding_type": language_v1.EncodingType.UTF8,
    }
    response = client.analyze_entities(request=request)

    found = len(response.entities)
    st.write(f"### We found {found} entities")
    st.write("---")
    for idx, ent in enumerate(response.entities):
        _render_entity(idx, found, ent)
# User input for text analysis; `max_chars` caps the input length so
# requests stay small.
user_input = st.text_area("Enter text to analyze", max_chars=2500)

if st.button("Analyze"):
    if user_input:
        sample_analyze_entities(user_input)
    else:
        # Previously an empty submission was ignored silently; give the
        # user explicit feedback instead.
        st.warning("Please enter some text to analyze.")