import json
import streamlit as st
from google.oauth2 import service_account
from google.cloud import language_v1
import requests

# Function for querying Google Knowledge Graph API
def query_google_knowledge_graph(api_key, entity_name):
    query = entity_name
    service_url = "https://kgsearch.googleapis.com/v1/entities:search"
    params = {
        'query': query,
        'limit': 1,
        'indent': True,
        'key': api_key,
    }
    response = requests.get(service_url, params=params)
    return response.json()
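
# For reference only: a small sketch of how to pull the top-ranked match out of
# a Knowledge Graph Search response. The "itemListElement" / "result" fields
# follow the public kgsearch JSON-LD schema; verify against the responses you
# actually receive before relying on them.
def top_kg_result(kg_response):
    items = kg_response.get("itemListElement", [])
    if not items:
        return None
    # Each element wraps the entity under "result" alongside a "resultScore"
    return items[0].get("result")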

# Header and intro
st.title("Google Cloud NLP Entity Analyzer")
st.write("## Introduction to the Knowledge Graph API")
st.write("---")
# ... (your intro text here)

def sample_analyze_entities(text_content, your_query=""):
    # The NLP client authenticates with a service account stored in Streamlit
    # secrets. The Knowledge Graph Search API, by contrast, takes a plain API
    # key, so it is loaded from a separate secret (the secret name below is an
    # assumption; match it to your own secrets.toml).
    service_account_info = json.loads(st.secrets["google_nlp"])
    kg_api_key = st.secrets["knowledge_graph_api_key"]
    credentials = service_account.Credentials.from_service_account_info(
        service_account_info, scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )
    client = language_v1.LanguageServiceClient(credentials=credentials)

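    # Build a plain-text, English document and request entity analysis with
    # UTF-8 offsets for the returned mentions.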
    type_ = language_v1.Document.Type.PLAIN_TEXT
    language = "en"
    document = {"content": text_content, "type_": type_, "language": language}
    encoding_type = language_v1.EncodingType.UTF8
    response = client.analyze_entities(request={"document": document, "encoding_type": encoding_type})

    # Collect the details of each entity returned by the NLP API
    entities_list = []
    for entity in response.entities:
        entity_details = {
            "Name": entity.name,
            "Type": language_v1.Entity.Type(entity.type_).name,
            "Salience Score": entity.salience,
            "Metadata": entity.metadata,
            "Mentions": [mention.text.content for mention in entity.mentions]
        }
        entities_list.append(entity_details)

    if your_query:
        st.write(f"### We found {len(entities_list)} entities for your query of **{your_query}**")
    else:
        st.write(f"### We found {len(entities_list)} entities in the text you provided")

    st.write("----")
    for i, entity in enumerate(entities_list):
        # ... (your existing entity display code)

        # Query the Google Knowledge Graph Search API for each entity
        # (this uses the plain API key loaded above, not the service account)
        kg_info = query_google_knowledge_graph(kg_api_key, entity['Name'])
        st.write("### Google Knowledge Graph Information")
        st.json(kg_info)  # Display the JSON response
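
        # Optional sketch using the top_kg_result helper defined above: surface
        # just the best match. "name" and "description" are standard KG Search
        # fields, though "description" may be missing for some entities.
        top = top_kg_result(kg_info)
        if top and top.get("description"):
            st.write(f"**{top.get('name', entity['Name'])}**: {top['description']}")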

        st.write("----")

# User input for text analysis
user_input = st.text_area("Enter text to analyze")
your_query = st.text_input("Enter your query (optional)")

if st.button("Analyze"):
    if not user_input.strip():
        st.warning("Please enter some text to analyze.")
    else:
        sample_analyze_entities(user_input, your_query)
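
# For local runs, the two secrets referenced above are assumed to live in
# .streamlit/secrets.toml along these lines (the key names are illustrative):
#
#   google_nlp = '<service-account JSON, stored as a single string>'
#   knowledge_graph_api_key = '<API key for the Knowledge Graph Search API>'
#
# On Streamlit Community Cloud, the same values go into the app's Secrets settings.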