DreamStream-1 committed on
Commit ee50fd8 · verified · 1 Parent(s): 19d71b0

Update app.py

Files changed (1)
  1. app.py +35 -97
app.py CHANGED
@@ -2,9 +2,6 @@ import gradio as gr
  import nltk
  import numpy as np
  import tflearn
- import random
- import json
- import pickle
  import torch
  from nltk.tokenize import word_tokenize
  from nltk.stem.lancaster import LancasterStemmer
@@ -17,10 +14,10 @@ from selenium import webdriver
  from selenium.webdriver.chrome.options import Options
  import chromedriver_autoinstaller
 
- # Ensure necessary NLTK resources are downloaded
+ # Ensure NLTK resources are downloaded
  nltk.download('punkt')
 
- # --- Constants ---
+ # Constants
  GOOGLE_MAPS_API_KEY = os.environ.get("GOOGLE_API_KEY") # Get API key from environment variable
  if not GOOGLE_MAPS_API_KEY:
  raise ValueError("Error: GOOGLE_MAPS_API_KEY environment variable not set.")
@@ -29,8 +26,7 @@ url = "https://maps.googleapis.com/maps/api/place/textsearch/json"
  places_details_url = "https://maps.googleapis.com/maps/api/place/details/json"
  query = "therapist OR counselor OR mental health professional OR marriage and family therapist OR psychotherapist OR psychiatrist OR psychologist OR nutritionist OR wellness doctor OR holistic practitioner OR integrative medicine OR chiropractor OR naturopath"
 
-
- # --- Chatbot Logic ---
+ # Chatbot
  stemmer = LancasterStemmer()
 
  try:
@@ -40,8 +36,8 @@ except FileNotFoundError:
  raise FileNotFoundError("Error: 'intents.json' file not found.")
 
  try:
- with open("data.pickle", "rb") as f:
- words, labels, training, output = pickle.load(f)
+ with open("data.pickle", "rb") as file:
+ words, labels, training, output = pickle.load(file)
  except FileNotFoundError:
  raise FileNotFoundError("Error: 'data.pickle' file not found.")
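
Note: the updated file still calls `pickle.load` here and `random.choice` in `chat()`, so the `pickle` and `random` imports dropped in the first hunk still appear to be needed (likewise `json` for reading `intents.json`). For context, a minimal sketch of the preprocessing that typically produces `data.pickle` for this kind of tflearn intents bot follows; the repository's actual generator script is not part of this commit, so the field names (`"intents"`, `"patterns"`) and structure are assumptions that mirror what `app.py` unpickles.

```python
# Hedged sketch: regenerating data.pickle from intents.json with the usual
# bag-of-words preprocessing. Not the repo's script; names are assumptions.
import json
import pickle
import numpy as np
import nltk
from nltk.stem.lancaster import LancasterStemmer

stemmer = LancasterStemmer()

with open("intents.json") as f:
    intents = json.load(f)

words, labels, docs_x, docs_y = [], [], [], []
for intent in intents["intents"]:
    for pattern in intent["patterns"]:
        tokens = nltk.word_tokenize(pattern)
        words.extend(tokens)
        docs_x.append(tokens)
        docs_y.append(intent["tag"])
    if intent["tag"] not in labels:
        labels.append(intent["tag"])

words = sorted(set(stemmer.stem(w.lower()) for w in words if w != "?"))
labels = sorted(labels)

training, output = [], []
empty_row = [0] * len(labels)
for i, doc in enumerate(docs_x):
    stems = [stemmer.stem(w.lower()) for w in doc]
    bag = [1 if w in stems else 0 for w in words]
    row = empty_row[:]
    row[labels.index(docs_y[i])] = 1
    training.append(bag)
    output.append(row)

with open("data.pickle", "wb") as f:
    pickle.dump((words, labels, np.array(training), np.array(output)), f)
```
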
@@ -49,9 +45,9 @@ net = tflearn.input_data(shape=[None, len(training[0])])
  net = tflearn.fully_connected(net, 8)
  net = tflearn.fully_connected(net, 8)
  net = tflearn.fully_connected(net, len(output[0]), activation="softmax")
- net = tflearn.regression(net)
 
  model = tflearn.DNN(net)
+
  try:
  model.load("MentalHealthChatBotmodel.tflearn")
  except FileNotFoundError:
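
Note: `tflearn.regression(net)` is the layer that attaches the loss and optimizer used by `fit()`, so dropping it mainly matters if the model is retrained; this app only loads saved weights and predicts (some tflearn versions may warn when `DNN()` finds no training op, so this is worth verifying). Below is a hedged sketch of the prediction path that `chat()` presumably relies on; `bag_of_words` is not shown in this diff, so its shape is assumed from the usual tutorial pattern.

```python
# Hypothetical helper mirroring the standard tflearn-chatbot pattern; app.py
# defines its own version, which this diff does not show.
def bag_of_words(sentence, words):
    bag = [0] * len(words)
    stems = [stemmer.stem(w.lower()) for w in word_tokenize(sentence)]
    for stem in stems:
        for i, w in enumerate(words):
            if w == stem:
                bag[i] = 1
    return np.array(bag)

# model.predict takes a batch and returns one probability row per input.
probs = model.predict([bag_of_words("I have been feeling anxious", words)])[0]
tag = labels[np.argmax(probs)]  # matched against the loaded intents inside chat()
```
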
@@ -78,47 +74,39 @@ def chat(message, history):
  if tg['tag'] == tag:
  responses = tg['responses']
  response = random.choice(responses)
- break
- else:
- response = "I'm sorry, I didn't understand that. Could you please rephrase?"
+ history.append((message, response))
  except Exception as e:
- response = f"An error occurred: {str(e)}"
- history.append((message, response))
+ response = "I'm sorry, I didn't understand that. Could you please rephrase?"
+ history.append((message, response))
  return history, history
 
- # --- Sentiment Analysis ---
+ # Sentiment Analysis
  tokenizer = AutoTokenizer.from_pretrained("cardiffnlp/twitter-roberta-base-sentiment")
  model_sentiment = AutoModelForSequenceClassification.from_pretrained("cardiffnlp/twitter-roberta-base-sentiment")
 
  def analyze_sentiment(text):
  try:
- inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
+ inputs = tokenizer(text, return_tensors="pt")
  with torch.no_grad():
- outputs = model_sentiment(**inputs)
- predicted_class = torch.argmax(outputs.logits, dim=1).item()
- sentiment = ["Negative", "Neutral", "Positive"][predicted_class]
+ logits = model_sentiment(**inputs).logits
+ sentiment = ["Negative", "Neutral", "Positive"][torch.argmax(logits)]
  return f"**Predicted Sentiment:** {sentiment}"
  except Exception as e:
  return f"Error analyzing sentiment: {str(e)}"
 
- # --- Emotion Detection (Placeholder) ---
+ # Emotion Detection
  def detect_emotion(text):
- # Replace with your actual emotion detection logic
+ # Implement your own emotion detection logic
  return "Emotion detection not implemented"
 
- # --- Suggestion Generation (Placeholder) ---
+ # Suggestion Generation
  def provide_suggestions(emotion):
- # Replace with your actual suggestion generation logic
+ # Implement your own suggestion generation logic
  return pd.DataFrame(columns=["Subject", "Article URL", "Video URL"])
 
- # --- Google Places API Functions ---
+ # Google Places API Functions
  def get_places_data(query, location, radius, api_key, next_page_token=None):
- params = {
- "query": query,
- "location": location,
- "radius": radius,
- "key": api_key
- }
+ params = {"query": query, "location": location, "radius": radius, "key": api_key}
  if next_page_token:
  params["pagetoken"] = next_page_token
  response = requests.get(url, params=params)
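
A quick standalone check of the sentiment path above (not part of the commit). The new version drops `truncation=True, padding=True`; that is harmless for a short single message, but inputs longer than the model's maximum length would then fail, so the flags are kept in this sketch. The Negative/Neutral/Positive order matches the cardiffnlp model's label indices.

```python
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch

name = "cardiffnlp/twitter-roberta-base-sentiment"
tok = AutoTokenizer.from_pretrained(name)
mdl = AutoModelForSequenceClassification.from_pretrained(name)

def sentiment_of(text: str) -> str:
    # truncation/padding guard against inputs longer than the model's max length
    inputs = tok(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        logits = mdl(**inputs).logits
    return ["Negative", "Neutral", "Positive"][int(torch.argmax(logits, dim=1))]

print(sentiment_of("I've been feeling a lot better this week."))
```
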
@@ -140,28 +128,11 @@ def get_place_details(place_id, api_key):
  else:
  return {}
 
- def scrape_website_from_google_maps(place_name):
- chrome_options = Options()
- chrome_options.add_argument("--headless")
- chrome_options.add_argument("--no-sandbox")
- chrome_options.add_argument("--disable-dev-shm-usage")
- driver = webdriver.Chrome(options=chrome_options)
- search_url = f"https://www.google.com/maps/search/{place_name.replace(' ', '+')}"
- driver.get(search_url)
- time.sleep(5)
- try:
- website_element = driver.find_element("xpath", '//a[contains(@aria-label, "Visit") and contains(@aria-label, "website")]')
- website_url = website_element.get_attribute('href')
- except:
- website_url = "Not available"
- driver.quit()
- return website_url
-
  def get_all_places(query, location, radius, api_key):
  all_results = []
  next_page_token = None
  while True:
- data = get_places_data(query + f" in {location}", location, radius, api_key, next_page_token)
+ data = get_places_data(query, location, radius, api_key, next_page_token)
  if data:
  results = data.get('results', [])
  for place in results:
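
The simplified call no longer appends `f" in {location}"` to the query and instead relies on the `location` and `radius` parameters. For reference, a hedged sketch of Places text-search pagination against the module-level `url` defined earlier; Google's `next_page_token` becomes valid only after a short delay, so a brief sleep between pages is usually needed (whether `get_all_places` already does this is not visible in the hunk).

```python
import time
import requests

def iter_places(query, location, radius, api_key):
    # Sketch only; not the function app.py uses. `url` is the module-level
    # text-search endpoint defined near the top of the file.
    params = {"query": query, "location": location, "radius": radius, "key": api_key}
    while True:
        data = requests.get(url, params=params, timeout=10).json()
        yield from data.get("results", [])
        token = data.get("next_page_token")
        if not token:
            break
        time.sleep(2)  # the token is not valid immediately
        params = {"pagetoken": token, "key": api_key}
```
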
@@ -179,52 +150,22 @@ def get_all_places(query, location, radius, api_key):
  break
  return all_results
 
- # --- Gradio Interface ---
- def gradio_interface(message, location, state, btn_chat, btn_search):
+ # Gradio Interface
+ def gradio_interface(message, location, state):
  history = state or []
- if len(history) == 0:
- if btn_chat:
- history, _ = chat(message, history)
- sentiment = analyze_sentiment(message)
- emotion = detect_emotion(message)
- suggestions = provide_suggestions(emotion)
- if location:
- try:
- wellness_results = pd.DataFrame(get_all_places(query, location, 50000, GOOGLE_MAPS_API_KEY), columns=["Name", "Address", "Phone", "Website"])
- except Exception as e:
- wellness_results = pd.DataFrame([["Error fetching data: " + str(e), "", "", ""]], columns=["Name", "Address", "Phone", "Website"])
- else:
- wellness_results = pd.DataFrame([["", "", "", ""]], columns=["Name", "Address", "Phone", "Website"])
+ if message:
+ history, _ = chat(message, history)
+ sentiment = analyze_sentiment(message)
+ emotion = detect_emotion(message)
+ suggestions = provide_suggestions(emotion)
+ if location:
+ try:
+ wellness_results = pd.DataFrame(get_all_places(query, location, 50000, GOOGLE_MAPS_API_KEY), columns=["Name", "Address", "Phone", "Website"])
+ except Exception as e:
+ wellness_results = pd.DataFrame([["Error fetching data: " + str(e), "", "", ""]], columns=["Name", "Address", "Phone", "Website"])
  else:
- history = history
- sentiment = ""
- emotion = ""
- suggestions = pd.DataFrame(columns=["Subject", "Article URL", "Video URL"])
- wellness_results = pd.DataFrame([["", "", "", ""]], columns=["Name", "Address", "Phone", "Website"])
- elif len(history) > 0 and location == "":
- if btn_chat:
- history, _ = chat(message, history)
- sentiment = analyze_sentiment(message)
- emotion = detect_emotion(message)
- suggestions = provide_suggestions(emotion)
  wellness_results = pd.DataFrame([["", "", "", ""]], columns=["Name", "Address", "Phone", "Website"])
- else:
- history = history
- sentiment = ""
- emotion = ""
- suggestions = pd.DataFrame(columns=["Subject", "Article URL", "Video URL"])
- wellness_results = pd.DataFrame([["", "", "", ""]], columns=["Name", "Address", "Phone", "Website"])
- elif len(history) > 0 and location != "" and btn_search:
- try:
- wellness_results = pd.DataFrame(get_all_places(query, location, 50000, GOOGLE_MAPS_API_KEY), columns=["Name", "Address", "Phone", "Website"])
- sentiment = analyze_sentiment(message)
- emotion = detect_emotion(message)
- suggestions = provide_suggestions(emotion)
- history, _ = chat(message, history)
- except Exception as e:
- wellness_results = pd.DataFrame([["Error: " + str(e), "", "", ""]], columns=["Name", "Address", "Phone", "Website"])
  else:
- history = history
  sentiment = ""
  emotion = ""
  suggestions = pd.DataFrame(columns=["Subject", "Article URL", "Video URL"])
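
The rewritten handler replaces the old three-way `btn_chat`/`btn_search` branching with a single `if message:` check. A hedged usage sketch, calling the handler directly to show the six-value tuple it returns (history appears twice, presumably once for the chatbot output and once for the `gr.State` output):

```python
# Direct call outside Gradio; printed values are illustrative.
history, sentiment, emotion, suggestions, wellness, state = gradio_interface(
    "I've been feeling stressed lately",  # message
    "",                                   # empty location: skip the Places search
    None,                                 # no prior state
)
print(sentiment)     # e.g. "**Predicted Sentiment:** Negative"
print(len(history))  # one (message, response) pair appended by chat()
```
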
@@ -232,14 +173,13 @@ def gradio_interface(message, location, state, btn_chat, btn_search):
 
  return history, sentiment, emotion, suggestions, wellness_results, history
 
- iface = gr.Interface(
+ gr.Interface(
  fn=gradio_interface,
  inputs=[
  gr.Textbox(label="Enter your message", placeholder="How are you feeling today?"),
  gr.Textbox(label="Enter your location (e.g., 'Hawaii, USA')", placeholder="Enter your location (optional)"),
  gr.State(),
- gr.Button("Chat"),
- gr.Button("Search")
+ gr.Button("Send")
  ],
  outputs=[
  gr.Chatbot(label="Chatbot Responses"),
@@ -252,6 +192,4 @@ iface = gr.Interface(
  live=True,
  title="Mental Health Chatbot with Wellness Professional Search",
  description="This chatbot provides mental health support with sentiment analysis, emotion detection, suggestions, and a list of nearby wellness professionals. Interact with the chatbot first, then enter a location to search."
- )
-
- iface.launch(debug=True, share=True)
+ ).launch(debug=True, share=True)
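
The old code kept the interface in `iface` and launched it separately; the new code chains `.launch()` directly onto `gr.Interface(...)`. A hedged equivalent that keeps the variable, which is handy if the interface needs to be referenced later; the outputs elided from this diff are summarised in a comment rather than guessed.

```python
iface = gr.Interface(
    fn=gradio_interface,
    inputs=[
        gr.Textbox(label="Enter your message", placeholder="How are you feeling today?"),
        gr.Textbox(label="Enter your location (e.g., 'Hawaii, USA')", placeholder="Enter your location (optional)"),
        gr.State(),
        gr.Button("Send"),  # mirrors the file; newer Gradio releases may reject Button as an input
    ],
    outputs=[
        gr.Chatbot(label="Chatbot Responses"),
        # ...remaining outputs per the return tuple: sentiment, emotion, suggestions,
        # wellness results, and the updated state (not shown in this diff)
    ],
    live=True,
    title="Mental Health Chatbot with Wellness Professional Search",
)
iface.launch(debug=True, share=True)  # share=True is typically ignored on Hugging Face Spaces
```
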
 