import requests
import streamlit as st
import pandas as pd
from apify_client import ApifyClient
# Function to fetch Google Maps info for a search string using the Apify actor
def fetch_google_maps_info(website_name):
    apify_client = ApifyClient("apify_api_uz0y556N4IG2aLcESj67kmnGSUpHF12XAkLp")
    run_input = {"searchStringsArray": [website_name]}
    run = apify_client.actor("nwua9Gu5YrADL7ZDj").call(run_input=run_input)
    items = list(apify_client.dataset(run["defaultDatasetId"]).iterate_items())
    return items[0] if items else None
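
# Note: downstream code assumes the returned place item exposes at least these keys:
# 'website', 'location' (with 'lat'/'lng'), 'popularTimesHistogram', and 'url'; the
# exact fields depend on the actor's output schema and may vary.
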
# Function to fetch weather info from OpenWeatherMap API
def fetch_weather_info(lat, lon):
    API_KEY = "91b23cab82ee530b2052c8757e343b0d"
    url = f"https://api.openweathermap.org/data/3.0/onecall?lat={lat}&lon={lon}&exclude=hourly,daily&appid={API_KEY}"
    response = requests.get(url)
    return response.json()
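
# Note: the One Call endpoint returns temperatures in Kelvin unless a 'units' query
# parameter is added, which is why the display code below subtracts 273.15.
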
# Function to fetch website content via the Apify actor. The original run_input was
# empty, so the target URL is passed through "startUrls" here (assumed input schema).
def fetch_website_content(website_url):
    apify_client = ApifyClient("apify_api_uz0y556N4IG2aLcESj67kmnGSUpHF12XAkLp")
    run_input = {"startUrls": [{"url": website_url}]}
    run = apify_client.actor("moJRLRc85AitArpNN").call(run_input=run_input)
    items = list(apify_client.dataset(run["defaultDatasetId"]).iterate_items())
    return items if items else None
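
# Note: each item returned above is rendered as-is in a table further down; no
# specific columns are assumed, since the fields depend on the actor's output.
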
# Function to fetch customer reviews for a Google Maps place URL using the Apify actor
def fetch_customer_reviews(google_maps_url):
    apify_client = ApifyClient("apify_api_uz0y556N4IG2aLcESj67kmnGSUpHF12XAkLp")
    run_input = {
        "startUrls": [{"url": google_maps_url}],
        "maxReviews": 20,
        "reviewsSort": "newest",
        "language": "en",
        "personalData": True,
    }
    run = apify_client.actor("Xb8osYTtOjlsgI6k9").call(run_input=run_input)
    items = list(apify_client.dataset(run["defaultDatasetId"]).iterate_items())
    return items
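
# Note: only the 'text' field of each review item is displayed below; any other
# review metadata returned by the actor is ignored.
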
# Streamlit app for Data Visualization
st.title("Data Visualization")

# Input for website or company name
website_name = st.text_input("Enter a website / company name:")

if website_name:
    # Initialize the progress bar
    progress_bar = st.progress(0)

    # Fetch Google Maps data
    google_maps_data = fetch_google_maps_info(website_name)
    progress_bar.progress(50)
    if google_maps_data:
        # Display website link
        website_link = google_maps_data.get('website')
        st.text_area("Website Link:", website_link)

        # Display location on a map and fetch weather info
        lat = google_maps_data["location"]["lat"]
        lng = google_maps_data["location"]["lng"]
        st.map(pd.DataFrame({'lat': [lat], 'lon': [lng]}))

        weather_data = fetch_weather_info(lat, lng)
        current_weather = weather_data.get("current", {})
        temp = current_weather.get('temp')
        temp_in_celsius = temp - 273.15
        st.write(f"**Location:** {lat}, {lng}")
        st.write(f"**Temperature:** {temp_in_celsius:.2f}°C")
        st.write(f"**Weather:** {current_weather.get('weather')[0].get('description')}")
        # Display Occupancy Data
        st.subheader("Occupancy Data")
        occupancy_data = google_maps_data.get('popularTimesHistogram', {})
        for day, day_data in occupancy_data.items():
            hours = [entry['hour'] for entry in day_data]
            occupancy = [entry['occupancyPercent'] for entry in day_data]
            st.write(day)
            st.bar_chart(pd.Series(occupancy, index=hours))
        # Fetch customer reviews for the Google Maps place URL
        google_maps_url = google_maps_data.get('url')
        reviews_data = fetch_customer_reviews(google_maps_url)
        progress_bar.progress(75)

        # Display the review text
        st.subheader("Customer Reviews")
        reviews_df = pd.DataFrame(reviews_data)
        if not reviews_df.empty and 'text' in reviews_df.columns:
            st.table(reviews_df[['text']])
        else:
            st.write("No reviews found for this place.")
        # Fetch and display website content (skipped when no website link is available)
        st.subheader("Website Content")
        website_content_data = fetch_website_content(website_link) if website_link else None
        progress_bar.progress(100)
        if website_content_data:
            website_df = pd.DataFrame(website_content_data)
            st.table(website_df)
        else:
            st.write("Unable to retrieve website content.")
    else:
        st.write("No results found for this website / company name on Google Maps.")