DreamStream-1 committed
Commit 5e7f7ee · verified · 1 Parent(s): ea569c1

Update app.py

Files changed (1): app.py (+34 -22)
app.py CHANGED
@@ -6,18 +6,13 @@ import numpy as np
 import tflearn
 import gradio as gr
 import requests
-import time
-from bs4 import BeautifulSoup
-from selenium import webdriver
-from selenium.webdriver.chrome.options import Options
-from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
 import torch
 import pandas as pd
-import os
-import chromedriver_autoinstaller
-import streamlit as st  # Add Streamlit import
+from bs4 import BeautifulSoup
+from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
 from nltk.tokenize import word_tokenize
 from nltk.stem.lancaster import LancasterStemmer
+import os
 
 # Ensure necessary NLTK resources are downloaded
 nltk.download('punkt')
@@ -96,7 +91,6 @@ tokenizer_sentiment = AutoTokenizer.from_pretrained("cardiffnlp/twitter-roberta-
 model_sentiment = AutoModelForSequenceClassification.from_pretrained("cardiffnlp/twitter-roberta-base-sentiment")
 
 # Emotion detection setup
-@st.cache_resource
 def load_emotion_model():
     tokenizer = AutoTokenizer.from_pretrained("j-hartmann/emotion-english-distilroberta-base")
     model = AutoModelForSequenceClassification.from_pretrained("j-hartmann/emotion-english-distilroberta-base")
@@ -109,18 +103,6 @@ url = "https://maps.googleapis.com/maps/api/place/textsearch/json"
 places_details_url = "https://maps.googleapis.com/maps/api/place/details/json"
 api_key = os.getenv("GOOGLE_API_KEY")  # Use environment variable for security
 
-# Install Chrome and Chromedriver for web scraping
-def install_chrome_and_driver():
-    os.system("apt-get update")
-    os.system("apt-get install -y wget curl")
-    os.system("wget -q https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb")
-    os.system("dpkg -i google-chrome-stable_current_amd64.deb")
-    os.system("apt-get install -y -f")
-    os.system("google-chrome-stable --version")
-    chromedriver_autoinstaller.install()
-
-install_chrome_and_driver()
-
 # Function to get places data using Google Places API
 def get_places_data(query, location, radius, api_key, next_page_token=None):
     params = {
@@ -139,10 +121,38 @@ def get_places_data(query, location, radius, api_key, next_page_token=None):
     else:
         return None
 
+# Web scraping function to get wellness professional data (alternative to API)
+def scrape_wellness_professionals(query, location):
+    # User-Agent header to simulate a browser request
+    headers = {
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
+    }
+
+    search_url = f"https://www.google.com/search?q={query}+near+{location}"
+
+    # Make a request to the search URL with headers
+    response = requests.get(search_url, headers=headers)
+    if response.status_code == 200:
+        soup = BeautifulSoup(response.text, 'html.parser')
+
+        # Parse and extract wellness professionals from the HTML
+        wellness_data = []
+        results = soup.find_all("div", class_="BVG0Nb")  # Adjust class based on the actual HTML structure
+        for result in results:
+            name = result.get_text()
+            link = result.find("a")["href"] if result.find("a") else None
+            wellness_data.append([name, link])
+
+        return wellness_data
+    else:
+        return []
+
 # Main function to fetch wellness professional data and display on map
 def get_wellness_professionals(location):
     query = "therapist OR counselor OR mental health professional OR marriage and family therapist OR psychotherapist OR psychiatrist OR psychologist OR nutritionist OR wellness doctor OR holistic practitioner OR integrative medicine OR chiropractor OR naturopath"
     radius = 50000  # 50 km radius
+
+    # Using Google Places API if available
     data = get_places_data(query, location, radius, api_key)
 
     if data:
@@ -155,7 +165,9 @@ def get_wellness_professionals(location):
             longitude = place.get("geometry", {}).get("location", {}).get("lng")
             wellness_data.append([name, address, latitude, longitude])
         return wellness_data
-    return []
+
+    # Fallback to scraping if API is not available or fails
+    return scrape_wellness_professionals(query, location)
 
 # Gradio interface setup for user interaction
 def user_interface(message, location, history):
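
With the Streamlit @st.cache_resource decorator removed, load_emotion_model runs as a plain function, so in a Gradio app it would typically be called once at module import time and the objects reused. A minimal sketch of that pattern, assuming load_emotion_model returns the tokenizer and model it constructs (the return statement is not shown in this hunk):

# Assumed return shape: (tokenizer, model), matching the objects built inside load_emotion_model.
tokenizer_emotion, model_emotion = load_emotion_model()

emotion_classifier = pipeline(
    "text-classification",
    model=model_emotion,
    tokenizer=tokenizer_emotion,
    top_k=None,  # return scores for every emotion label, not just the top one
)

print(emotion_classifier("I feel hopeful about tomorrow"))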
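
The next_page_token parameter on get_places_data suggests the caller is expected to page through Places Text Search results, which arrive in batches of up to 20. A hedged sketch of how a caller could drive that pagination; get_all_places is a hypothetical helper not in this commit, and it assumes get_places_data returns the parsed JSON dict from the API:

import time

def get_all_places(query, location, radius, api_key, max_pages=3):
    # Hypothetical wrapper: follow next_page_token until the API stops
    # returning one or max_pages is reached.
    results, token = [], None
    for _ in range(max_pages):
        data = get_places_data(query, location, radius, api_key, next_page_token=token)
        if not data:
            break
        results.extend(data.get("results", []))
        token = data.get("next_page_token")
        if not token:
            break
        time.sleep(2)  # a fresh token needs a short delay before it is accepted
    return results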
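
Note that the two branches of get_wellness_professionals return differently shaped rows: the Places API path yields [name, address, latitude, longitude], while the scraping fallback yields [name, link]. A small, hypothetical normalisation helper (not part of the commit) shows one way downstream code such as user_interface could handle both shapes before display, using the pandas import already present:

import pandas as pd

def wellness_results_to_df(rows):
    # Hypothetical helper: label columns according to the row shape
    # produced by get_wellness_professionals.
    if rows and len(rows[0]) == 4:
        return pd.DataFrame(rows, columns=["Name", "Address", "Latitude", "Longitude"])
    return pd.DataFrame(rows, columns=["Name", "Link"])

df = wellness_results_to_df(get_wellness_professionals("Honolulu, HI"))
print(df.head())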