antfraia committed
Commit 7aa96dc · 1 Parent(s): ba1946b

Update app.py

Files changed (1)
  1. app.py +28 -65
app.py CHANGED
@@ -1,77 +1,40 @@
 import streamlit as st
 from apify_client import ApifyClient

-APIFY_API_TOKEN = "apify_api_uz0y556N4IG2aLcESj67kmnGSUpHF12XAkLp"
-APIFY_WEATHER_KEY = "91b23cab82ee530b2052c8757e343b0d"
+# Initialize the ApifyClient with your API token
+client = ApifyClient("apify_api_uz0y556N4IG2aLcESj67kmnGSUpHF12XAkLp")

-client = ApifyClient(APIFY_API_TOKEN)
+# Streamlit UI
+st.title("Places Information")

-def fetch_places_from_google_maps(website_name, location):
-    run_input = {
-        "searchStringsArray": [website_name],
-        "locationQuery": location,
-        "maxCrawledPlacesPerSearch": 1,  # Fetching only one record for simplicity
-        # Other input parameters can be added as per your requirements
-    }
+website_name = st.text_input("Enter the type of place (e.g., restaurant, cafe):", "restaurant")
+location = st.text_input("Enter the location:", "New York, USA")
+button = st.button("Fetch Information")

-    run = client.actor("mc9KJTQJg3zfQpANg/nwua9Gu5YrADL7ZDj").call(run_input=run_input)
-
-    items = list(client.dataset(run["defaultDatasetId"]).iterate_items())
-    if items:
-        return items[0]
-    return None

-def get_weather_data(lat, lon):
-    BASE_URL = f"https://api.openweathermap.org/data/3.0/onecall"
-    params = {
-        "lat": lat,
-        "lon": lon,
-        "exclude": "hourly,daily,minutely",
-        "appid": APIFY_WEATHER_KEY,
-        "units": "metric"
-    }
-    response = requests.get(BASE_URL, params=params)
-    if response.status_code == 200:
-        return response.json()
-    else:
-        return None
-
-# Streamlit UI
-st.title("Website Information Extractor")
+def fetch_places_from_google_maps(website_name, location):
+    try:
+        run_input = {
+            "searchStringsArray": [website_name],
+            "locationQuery": location,
+            "maxCrawledPlacesPerSearch": 1,  # Fetching only one record for simplicity
+            # Other input parameters can be added as per your requirements
+        }
+
+        # Update the actor call
+        run = client.actor("compass~crawler-google-places").call(run_input=run_input)
+
+        items = list(client.dataset(run["defaultDatasetId"]).iterate_items())
+        if items:
+            return items[0]
+    except Exception as e:
+        st.write(f"Error: {str(e)}")
+    return None

-website_name = st.text_input("Enter a website/company name:")
-location = st.text_input("Enter location (e.g., New York, USA):")

-if website_name and location:
+if button:
     place_data = fetch_places_from_google_maps(website_name, location)
-
     if place_data:
-        st.subheader("Place Details")
-        st.write(f"Name: {place_data.get('name')}")
-        st.write(f"Address: {place_data.get('address')}")
-        st.write(f"Rating: {place_data.get('rating')}")
-
-        # Display reviews if available
-        if 'reviews' in place_data and place_data['reviews']:
-            st.subheader("Reviews")
-            for review in place_data['reviews']:
-                st.write(f"Reviewer: {review.get('reviewerName', 'N/A')}")
-                st.write(f"Review: {review.get('text', 'N/A')}")
-
-        # Display images if available
-        if 'imageUrls' in place_data and place_data['imageUrls']:
-            st.subheader("Images")
-            for image_url in place_data['imageUrls']:
-                st.image(image_url)
-
-        lat, lon = place_data.get("location", {}).get("lat"), place_data.get("location", {}).get("lng")
-
-        if lat and lon:
-            weather_data = get_weather_data(lat, lon)
-            if weather_data:
-                temp = weather_data["current"]["temp"]
-                weather_desc = weather_data["current"]["weather"][0]["description"]
-                st.subheader("Current Weather")
-                st.write(f"It's {temp}°C with {weather_desc}.")
+        st.write(place_data)
     else:
-        st.write("Couldn't fetch details for the provided input. Please try again.")
+        st.write("No data found!")