Spaces:
Sleeping
Sleeping
File size: 4,229 Bytes
3e996d0 e6ac219 d24c8e2 932e360 1a3c876 0710745 8e58a19 0710745 d24c8e2 8a09b1d e2bf5cc d24c8e2 e2bf5cc 8e58a19 d24c8e2 8a09b1d 5e73404 8a09b1d 036ebf1 e6ac219 036ebf1 0710745 d24c8e2 036ebf1 0710745 37a155f d24c8e2 37a155f e2bf5cc 8a09b1d e2bf5cc 8a09b1d e2bf5cc 37a155f fce2a17 ef76fa0 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 |
import pandas as pd
import requests
import streamlit as st

from apify_client import ApifyClient
# Function to fetch Google Maps info using the updated actor
def fetch_google_maps_info(website_name):
    """Look up a place on Google Maps via an Apify scraper actor.

    Args:
        website_name: Search string (a website or company name).

    Returns:
        The first item (dict) of the actor's result dataset, or None when
        the search produced no items.
    """
    # SECURITY: hard-coded Apify token — move to an environment variable / secret store.
    apify_client = ApifyClient("apify_api_uz0y556N4IG2aLcESj67kmnGSUpHF12XAkLp")
    run_input = {"searchStringsArray": [website_name]}
    # "nwua9Gu5YrADL7ZDj" is the Google Maps scraper actor ID; .call() blocks until the run finishes.
    run = apify_client.actor("nwua9Gu5YrADL7ZDj").call(run_input=run_input)
    items = list(apify_client.dataset(run["defaultDatasetId"]).iterate_items())
    return items[0] if items else None
# Function to fetch weather info from OpenWeatherMap API
def fetch_weather_info(lat, lon):
    """Fetch current weather for a coordinate from the OpenWeatherMap One Call 3.0 API.

    Args:
        lat: Latitude in decimal degrees.
        lon: Longitude in decimal degrees.

    Returns:
        The parsed JSON response (dict). Temperatures are in Kelvin because
        no ``units`` parameter is sent.
    """
    # SECURITY: hard-coded API key — move to an environment variable / secret store.
    API_KEY = "91b23cab82ee530b2052c8757e343b0d"
    url = (
        "https://api.openweathermap.org/data/3.0/onecall"
        f"?lat={lat}&lon={lon}&exclude=hourly,daily&appid={API_KEY}"
    )
    # Timeout prevents the Streamlit app from hanging forever if the API stalls.
    response = requests.get(url, timeout=30)
    return response.json()
# Function to fetch website content using the updated actor
def fetch_website_content(website_url):
    """Crawl a website's content via the Apify website-content-crawler actor.

    Args:
        website_url: URL of the site to crawl.

    Returns:
        The list of dataset items, or None when the crawl produced nothing.
    """
    # SECURITY: hard-coded Apify token — move to an environment variable / secret store.
    apify_client = ApifyClient("apify_api_uz0y556N4IG2aLcESj67kmnGSUpHF12XAkLp")
    # BUG FIX: the original sent an empty run_input, ignoring `website_url`
    # entirely — the actor had no start URL to crawl. Pass it explicitly.
    run_input = {"startUrls": [{"url": website_url}]}
    run = apify_client.actor("moJRLRc85AitArpNN").call(run_input=run_input)
    items = list(apify_client.dataset(run["defaultDatasetId"]).iterate_items())
    return items if items else None
# Function to fetch reviews using the new actor
def fetch_customer_reviews(google_maps_url):
    """Fetch up to 20 of the newest English reviews for a Google Maps place.

    Args:
        google_maps_url: The place's Google Maps URL.

    Returns:
        A list of review items (possibly empty).
    """
    # SECURITY: hard-coded Apify token — move to an environment variable / secret store.
    apify_client = ApifyClient("apify_api_uz0y556N4IG2aLcESj67kmnGSUpHF12XAkLp")
    run_input = {
        "startUrls": [{"url": google_maps_url}],
        "maxReviews": 20,
        "reviewsSort": "newest",
        "language": "en",
        # NOTE(review): requests reviewer personal data — confirm this is
        # actually needed; it has privacy/compliance implications.
        "personalData": True,
    }
    run = apify_client.actor("Xb8osYTtOjlsgI6k9").call(run_input=run_input)
    items = list(apify_client.dataset(run["defaultDatasetId"]).iterate_items())
    return items
# Streamlit app for Data Visualization
st.title("Data Visualization")

# Input for website or company name
website_name = st.text_input("Enter a website / company name:")

if website_name:
    # Initialize the progress bar
    progress_bar = st.progress(0)

    # Fetch Google Maps data
    google_maps_data = fetch_google_maps_info(website_name)
    progress_bar.progress(50)

    if google_maps_data:
        # Display website link (may be None if the place has no website)
        website_link = google_maps_data.get('website')
        st.text_area("Website Link:", website_link)

        # Display location and fetch weather info
        lat = google_maps_data["location"]["lat"]
        lng = google_maps_data["location"]["lng"]
        st.map(pd.DataFrame({'lat': [lat], 'lon': [lng]}))

        weather_data = fetch_weather_info(lat, lng)
        current_weather = weather_data.get("current", {})
        st.write(f"**Location:** {lat}, {lng}")

        # Guard: the API may omit fields (e.g. on auth/quota errors) —
        # the original crashed on `None - 273.15` / `None[0]` here.
        temp = current_weather.get('temp')
        if temp is not None:
            # One Call returns Kelvin (no `units` param); convert to Celsius.
            st.write(f"**Temperature:** {temp - 273.15:.2f}°C")
        weather_list = current_weather.get('weather') or []
        if weather_list:
            st.write(f"**Weather:** {weather_list[0].get('description')}")

        # Display Occupancy Data
        st.subheader("Occupancy Data")
        occupancy_data = google_maps_data.get('popularTimesHistogram', {})
        for day, day_data in occupancy_data.items():
            hours = [entry['hour'] for entry in day_data]
            occupancy = [entry['occupancyPercent'] for entry in day_data]
            st.write(day)
            st.bar_chart(pd.Series(occupancy, index=hours))

        # Fetch customer reviews from the new API
        google_maps_url = google_maps_data.get('url')
        reviews_data = fetch_customer_reviews(google_maps_url)
        progress_bar.progress(75)

        # Display the reviews from the new API
        st.subheader("Customer Reviews from New API")
        # Guard: an empty result list would raise KeyError on ['text'].
        if reviews_data:
            reviews_df = pd.DataFrame(reviews_data)[['text']]
            st.table(reviews_df)
        else:
            st.write("No reviews found.")

        # Fetch and Display Website Content
        st.subheader("Website Content")
        if website_link:
            website_content_data = fetch_website_content(website_link)
            progress_bar.progress(100)
            if website_content_data:
                website_df = pd.DataFrame(website_content_data)
                st.table(website_df)
            else:
                st.write("Unable to retrieve website content.")
        else:
            # No website on the Google Maps listing — nothing to crawl.
            progress_bar.progress(100)
            st.write("Unable to retrieve website content.")
    else:
        st.write("No results found for this website / company name on Google Maps.")