Update app.py
app.py
CHANGED
@@ -1,77 +1,40 @@
Before:

 import streamlit as st
 from apify_client import ApifyClient

-
-

-

-
-
-
-        "locationQuery": location,
-        "maxCrawledPlacesPerSearch": 1,  # Fetching only one record for simplicity
-        # Other input parameters can be added as per your requirements
-    }

-    run = client.actor("mc9KJTQJg3zfQpANg/nwua9Gu5YrADL7ZDj").call(run_input=run_input)
-
-    items = list(client.dataset(run["defaultDatasetId"]).iterate_items())
-    if items:
-        return items[0]
-    return None

-def
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-st.

-website_name = st.text_input("Enter a website/company name:")
-location = st.text_input("Enter location (e.g., New York, USA):")

-if
     place_data = fetch_places_from_google_maps(website_name, location)
-
     if place_data:
-        st.
-        st.write(f"Name: {place_data.get('name')}")
-        st.write(f"Address: {place_data.get('address')}")
-        st.write(f"Rating: {place_data.get('rating')}")
-
-        # Display reviews if available
-        if 'reviews' in place_data and place_data['reviews']:
-            st.subheader("Reviews")
-            for review in place_data['reviews']:
-                st.write(f"Reviewer: {review.get('reviewerName', 'N/A')}")
-                st.write(f"Review: {review.get('text', 'N/A')}")
-
-        # Display images if available
-        if 'imageUrls' in place_data and place_data['imageUrls']:
-            st.subheader("Images")
-            for image_url in place_data['imageUrls']:
-                st.image(image_url)
-
-        lat, lon = place_data.get("location", {}).get("lat"), place_data.get("location", {}).get("lng")
-
-        if lat and lon:
-            weather_data = get_weather_data(lat, lon)
-            if weather_data:
-                temp = weather_data["current"]["temp"]
-                weather_desc = weather_data["current"]["weather"][0]["description"]
-                st.subheader("Current Weather")
-                st.write(f"It's {temp}°C with {weather_desc}.")
     else:
-        st.write("
After:

 import streamlit as st
 from apify_client import ApifyClient

+# Initialize the ApifyClient with your API token
+client = ApifyClient("apify_api_uz0y556N4IG2aLcESj67kmnGSUpHF12XAkLp")

+# Streamlit UI
+st.title("Places Information")

+website_name = st.text_input("Enter the type of place (e.g., restaurant, cafe):", "restaurant")
+location = st.text_input("Enter the location:", "New York, USA")
+button = st.button("Fetch Information")


+def fetch_places_from_google_maps(website_name, location):
+    try:
+        run_input = {
+            "searchStringsArray": [website_name],
+            "locationQuery": location,
+            "maxCrawledPlacesPerSearch": 1,  # Fetching only one record for simplicity
+            # Other input parameters can be added as per your requirements
+        }
+
+        # Update the actor call
+        run = client.actor("compass~crawler-google-places").call(run_input=run_input)
+
+        items = list(client.dataset(run["defaultDatasetId"]).iterate_items())
+        if items:
+            return items[0]
+    except Exception as e:
+        st.write(f"Error: {str(e)}")
+    return None


+if button:
     place_data = fetch_places_from_google_maps(website_name, location)
     if place_data:
+        st.write(place_data)
     else:
+        st.write("No data found!")