import requests
from bs4 import BeautifulSoup
import pandas as pd
import folium
from folium.plugins import MarkerCluster, HeatMap
import plotly.graph_objects as go
import plotly.express as px
from geopy.geocoders import Nominatim
import re
import streamlit as st
# Streamlit title and description
st.title("米其林餐廳指南爬蟲")  # "Michelin Restaurant Guide Crawler"
st.write("Extract restaurant data, visualize with a Sunburst chart and bar chart, and display locations on a map with heatmap.")
# Read data from Google Sheets
sheet_id = "1xUfnD1WCF5ldqECI8YXIko1gCpaDDCwTztL17kjI42U"
df1 = pd.read_csv(f"https://docs.google.com/spreadsheets/d/{sheet_id}/export?format=csv")
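# Note: the export?format=csv endpoint returns the first sheet as CSV and only
# works if the spreadsheet is shared so that anyone with the link can view it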
# Convert the "網址" (URL) column to a Python list
urls = df1["網址"].tolist()
# Create a DataFrame to store all restaurant data
df = pd.DataFrame(columns=["Store Name", "Address", "Phone", "Latitude", "Longitude", "Region"])
# Initialize Nominatim geocoder
geolocator = Nominatim(user_agent="my_app")
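# Nominatim (OpenStreetMap's geocoder) requires a descriptive user_agent identifying the application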
# Function to extract the region (區/縣/市) from an address with a regex
def extract_region(address):
    # Prefer the district (區); fall back to the county (縣) or city (市) portion
    match = re.search(r'.*?區|.*?縣|.*?市', address)
    if match:
        return match.group(0)
    else:
        return "Unknown"
# Function to fetch and parse data
def fetch_data():
    global df
    # Progress bar in Streamlit
    progress_bar = st.progress(0)
    total_urls = len(urls)
    # Iterate through each URL
    for idx, url in enumerate(urls):
        # A timeout keeps the app from hanging indefinitely on a slow response
        response = requests.get(url, timeout=10)
        soup = BeautifulSoup(response.content, "html.parser")
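        # NOTE: the CSS classes and data-event attribute below target the current
        # Michelin Guide restaurant page markup and may break if the site changes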
        try:
            store_name = soup.find("h2", class_="restaurant-details__heading--title").text.strip()
        except AttributeError:
            store_name = None
        try:
            address = soup.find("li", class_="restaurant-details__heading--address").text.strip()
            region = extract_region(address)
        except AttributeError:
            address = None
            region = "Unknown"
        # Try to extract phone number
        try:
            phone = soup.find("a", {"data-event": "CTA_tel"}).get("href").replace("tel:", "")
        except AttributeError:
            phone = None
        # Geocode the address; failures (timeouts, service errors, missing address)
        # simply leave the coordinates empty
        try:
            location = geolocator.geocode(address)
            if location:
                latitude = location.latitude
                longitude = location.longitude
            else:
                latitude = None
                longitude = None
        except Exception:
            latitude = None
            longitude = None
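        # To respect Nominatim's usage policy (about one request per second), a short
        # pause could be added here, e.g. time.sleep(1) with `import time` at the top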
        new_row = pd.DataFrame({
            "Store Name": [store_name],
            "Address": [address],
            "Phone": [phone],
            "Latitude": [latitude],
            "Longitude": [longitude],
            "Region": [region]
        })
        df = pd.concat([df, new_row], ignore_index=True)
        # Update progress bar
        progress_bar.progress((idx + 1) / total_urls)
# Button to trigger data fetching
if st.button("爬取餐廳資料"):  # "Scrape restaurant data"
    fetch_data()
# Save the DataFrame (including latitude and longitude) to CSV; utf-8-sig adds a BOM so Excel displays the Chinese text correctly
csv_file = "restaurants_data.csv"
df.to_csv(csv_file, encoding="utf-8-sig", index=False)
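# Note: on hosted platforms (e.g. Hugging Face Spaces) the working directory may be
# ephemeral, so the CSV persists only for the current session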
# Display the DataFrame as a table at the top
st.subheader("Restaurant Data")
st.dataframe(df)
# Display download button for the CSV
with open(csv_file, "rb") as f:
    csv_bytes = f.read()
st.download_button(
    label="Download restaurant data as CSV",
    data=csv_bytes,
    file_name=csv_file,
    mime="text/csv"
)
# Count the number of restaurants in each region
region_group = df.groupby("Region").size().reset_index(name='Count')
# Plot Sunburst chart
sunburst = px.sunburst(
    region_group,
    path=['Region'],
    values='Count',
    title="Restaurant Distribution by Region",
)
sunburst.update_layout(
    title_x=0.5,
    title_font=dict(size=24, family="Arial"),
    height=600,
    margin=dict(t=50, b=50, l=50, r=50)
)
st.subheader("Restaurant Distribution by Region (Sunburst Chart)")
st.plotly_chart(sunburst)
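# With a single-level path the sunburst is a single ring; passing the full df with
# path=['Region', 'Store Name'] would nest individual stores inside each region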
# Plot bar chart with custom colors and labels
bar_chart = go.Figure(go.Bar(
    x=region_group["Region"],
    y=region_group["Count"],
    text=region_group["Count"],
    textposition='auto',
    marker=dict(color=px.colors.qualitative.Set2)
))
bar_chart.update_layout(
    title="Restaurant Count by Region",
    title_x=0.5,
    title_font=dict(size=24, family="Arial"),
    height=400,
    margin=dict(t=50, b=50, l=50, r=50),
    xaxis_title="Region",
    yaxis_title="Number of Restaurants",
    xaxis=dict(tickangle=-45)
)
st.subheader("Restaurant Count by Region (Bar Chart)")
st.plotly_chart(bar_chart)
# Display a map using Folium
st.subheader("Restaurant Locations Map with Heatmap")
# Create map centered around Tainan
m = folium.Map(location=[23.0, 120.2], zoom_start=12)
# Add marker cluster to the map
marker_cluster = MarkerCluster().add_to(m)
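# MarkerCluster groups nearby markers into numbered clusters that expand on zoom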
# Prepare data for heatmap
heat_data = []
for index, row in df.iterrows():
    if pd.notnull(row["Latitude"]) and pd.notnull(row["Longitude"]):
        folium.Marker(
            location=[row["Latitude"], row["Longitude"]],
            popup=f"{row['Store Name']} ({row['Phone']})",
            tooltip=row["Address"]
        ).add_to(marker_cluster)
        heat_data.append([row["Latitude"], row["Longitude"]])
# Add heatmap layer
HeatMap(heat_data).add_to(m)
# Display the map in Streamlit
st.components.v1.html(m._repr_html_(), height=600)
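# To run this app locally (assuming the file is saved as app.py):
#   pip install streamlit requests beautifulsoup4 pandas folium plotly geopy
#   streamlit run app.py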