Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -38,28 +38,7 @@ min_rating = st.slider("請輸入想查詢的最低評分:", 1.0, 5.0, 3.5)
|
|
38 |
|
39 |
# List of URLs to scrape
|
40 |
urls = [
|
41 |
-
|
42 |
-
"https://www.tw-animal.com/pet/171211/c000186.html",
|
43 |
-
"https://www.tw-animal.com/pet/171211/c000186.html",
|
44 |
-
"https://www.tw-animal.com/pet/171211/c000081.html",
|
45 |
-
"https://www.tw-animal.com/pet/171211/c001166.html",
|
46 |
-
"https://www.tw-animal.com/pet/171211/c000773.html",
|
47 |
-
"https://www.tw-animal.com/pet/171211/c001038.html",
|
48 |
-
"https://www.tw-animal.com/pet/171211/c000741.html",
|
49 |
-
"https://www.tw-animal.com/pet/171211/c001451.html",
|
50 |
-
"https://www.tw-animal.com/pet/171211/c000102.html",
|
51 |
-
"https://www.tw-animal.com/pet/171211/c000757.html",
|
52 |
-
"https://www.tw-animal.com/pet/171211/c000703.html",
|
53 |
-
"https://www.tw-animal.com/pet/171211/c000481.html",
|
54 |
-
"https://www.tw-animal.com/pet/171211/c000971.html",
|
55 |
-
"https://www.tw-animal.com/pet/171211/c000187.html",
|
56 |
-
"https://www.tw-animal.com/pet/171211/c001357.html",
|
57 |
-
"https://www.tw-animal.com/pet/171211/c001065.html",
|
58 |
-
"https://www.tw-animal.com/pet/171211/c000165.html",
|
59 |
-
"https://www.tw-animal.com/pet/171211/c001138.html",
|
60 |
-
"https://www.tw-animal.com/pet/171211/c000484.html",
|
61 |
-
"https://www.tw-animal.com/pet/171211/c001089.html",
|
62 |
-
"https://www.tw-animal.com/pet/171211/c001252.html"
|
63 |
]
|
64 |
|
65 |
# Create an empty list to store the extracted data
|
@@ -67,13 +46,18 @@ data_list = []
|
|
67 |
|
68 |
# Initialize the geolocator
|
69 |
geolocator = Nominatim(user_agent="geoapiExercises")
|
|
|
70 |
|
71 |
-
# Function to geocode an address with retry logic
|
72 |
-
def geocode_address(address, retries=
|
|
|
|
|
|
|
73 |
for i in range(retries):
|
74 |
try:
|
75 |
location = geolocator.geocode(address)
|
76 |
if location:
|
|
|
77 |
return location
|
78 |
except (GeocoderTimedOut, GeocoderServiceError) as e:
|
79 |
st.warning(f"Geocoding error: {e}. Retrying...")
|
@@ -81,6 +65,7 @@ def geocode_address(address, retries=3, delay=2):
|
|
81 |
except GeocoderServiceError as e:
|
82 |
st.error(f"Service error: {e}")
|
83 |
break
|
|
|
84 |
st.warning(f"Failed to geocode address: {address}")
|
85 |
return None
|
86 |
|
|
|
# List of URLs to scrape
urls = [
    # (List of URLs)
]
# Create an empty list to store the extracted data
data_list = []

# Initialize the geolocator
geolocator = Nominatim(user_agent="geoapiExercises")
geocode_cache = {}  # Simple in-memory cache
# Function to geocode an address with retry logic and caching
def geocode_address(address, retries=5, delay=5):
    """Geocode *address* via the module-level Nominatim ``geolocator``.

    Results are memoized in ``geocode_cache`` so each distinct address hits
    the network at most once.  A ``GeocoderTimedOut`` is retried up to
    *retries* times with *delay* seconds between attempts; a
    ``GeocoderServiceError`` aborts immediately.

    Args:
        address: free-form address string passed to ``geolocator.geocode``.
        retries: maximum number of attempts on timeout.
        delay: seconds to sleep between retry attempts.

    Returns:
        The geopy ``Location`` on success, ``None`` on failure.
    """
    import time  # local import: no top-of-file import block is visible in this chunk

    if address in geocode_cache:
        return geocode_cache[address]

    for _ in range(retries):
        try:
            location = geolocator.geocode(address)
            if location:
                geocode_cache[address] = location
                return location
        # BUG FIX: the original caught (GeocoderTimedOut, GeocoderServiceError)
        # here, which made the dedicated GeocoderServiceError handler below
        # unreachable.  Only timeouts should be retried.
        except GeocoderTimedOut as e:
            st.warning(f"Geocoding error: {e}. Retrying...")
            # NOTE(review): the sleep line (new-file line 64) is not visible in
            # the scraped diff — presumed time.sleep(delay); confirm upstream.
            time.sleep(delay)
        except GeocoderServiceError as e:
            st.error(f"Service error: {e}")
            break

    st.warning(f"Failed to geocode address: {address}")
    return None