import json
import streamlit as st
from google.oauth2 import service_account
from google.cloud import language_v1
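
# NOTE: this app expects the Google service-account key to be stored as a single
# JSON string in Streamlit secrets under the key "google_nlp". A sketch of the
# expected .streamlit/secrets.toml entry (values below are placeholders, not real credentials):
#
#   google_nlp = '{"type": "service_account", "project_id": "...", "private_key": "...", ...}'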

# Sidebar content
st.sidebar.title("About This Tool")
st.sidebar.markdown("This tool leverages Google's NLP technology for entity analysis.")
st.sidebar.markdown("### Step-by-Step Guide")
st.sidebar.markdown("""
1. **Open the Tool**: Navigate to the URL where the tool is hosted.
2. **User Input**: Enter the text you want to analyze.
3. **Analyze**: Click the 'Analyze' button.
4. **View Results**: See the identified entities and their details.
""")

# Header and intro
st.title("Google Cloud NLP Entity Analyzer")
st.write("This tool analyzes text to identify entities such as people, locations, organizations, and events.")

def sample_analyze_entities(text_content):
    """Analyze entities in the given text with the Google Cloud Natural Language API and render the results."""
    # Load the service-account credentials stored as a JSON string in Streamlit secrets
    service_account_info = json.loads(st.secrets["google_nlp"])
    credentials = service_account.Credentials.from_service_account_info(
        service_account_info, scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )
    
    client = language_v1.LanguageServiceClient(credentials=credentials)
    document = {"content": text_content, "type_": language_v1.Document.Type.PLAIN_TEXT, "language": "en"}
    encoding_type = language_v1.EncodingType.UTF8

    response = client.analyze_entities(request={"document": document, "encoding_type": encoding_type})
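    # Each returned entity includes a name, a type, and a salience score, and may
    # also carry metadata (e.g. a Wikipedia URL or knowledge-graph MID) and mentions.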

    st.write(f"### We found {len(response.entities)} entities")
    st.write("---")

    for i, entity in enumerate(response.entities):
        st.write(f"Entity {i+1} of {len(response.entities)}")
        st.write(f"Name: {entity.name}")
        st.write(f"Type: {language_v1.Entity.Type(entity.type_).name}")
        st.write(f"Salience Score: {entity.salience}")
        
        if entity.metadata:
            st.write("Metadata:")
            # Convert the protobuf map to a plain dict so Streamlit renders it as JSON
            st.write(dict(entity.metadata))

        if entity.mentions:
            st.write("Mentions:")
            st.write(', '.join([mention.text.content for mention in entity.mentions]))
        
        st.write("---")

# User input for text analysis
user_input = st.text_area("Enter text to analyze", max_chars=2500)

if st.button("Analyze"):
    if user_input:
        sample_analyze_entities(user_input)
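
# To run this app locally (assuming the file is saved as app.py and the
# "google_nlp" secret is configured):
#   streamlit run app.py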