import nltk
import numpy as np
import random
import json
import pickle
import gradio as gr
import requests
import folium
from nltk.tokenize import word_tokenize
from nltk.stem.lancaster import LancasterStemmer
from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
import tensorflow as tf
import tflearn
import torch
import pandas as pd
import time
from bs4 import BeautifulSoup
import re  # regex for phone/email extraction
import os
# Google Places API endpoint
url = "https://maps.googleapis.com/maps/api/place/textsearch/json"
places_details_url = "https://maps.googleapis.com/maps/api/place/details/json"
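# NOTE: places_details_url is defined for a follow-up Place Details lookup but is
# not used anywhere below; it is kept in case per-place details are added later.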
# Initialize necessary libraries for chatbot and NLP
nltk.download('punkt')
stemmer = LancasterStemmer()
# Load the chatbot intents file
with open("intents.json") as file:
data = json.load(file)
# Load preprocessed data from pickle
with open("data.pickle", "rb") as f:
words, labels, training, output = pickle.load(f)
# Build the chatbot model
net = tflearn.input_data(shape=[None, len(training[0])])
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, len(output[0]), activation="softmax")
net = tflearn.regression(net)
model = tflearn.DNN(net)
model.load("MentalHealthChatBotmodel.tflearn")
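# The layer shapes above must match the architecture used when
# "MentalHealthChatBotmodel.tflearn" was trained, or model.load() will fail.
# A minimal (re)training sketch, assuming `training` and `output` are the
# bag-of-words vectors and one-hot intent labels stored in data.pickle:
#
#   model.fit(training, output, n_epoch=1000, batch_size=8, show_metric=True)
#   model.save("MentalHealthChatBotmodel.tflearn")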
# Emotion and sentiment analysis model
def load_model():
tokenizer = AutoTokenizer.from_pretrained("j-hartmann/emotion-english-distilroberta-base")
model = AutoModelForSequenceClassification.from_pretrained("j-hartmann/emotion-english-distilroberta-base")
return tokenizer, model
tokenizer, emotion_model = load_model()
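# Per its model card, the j-hartmann model predicts seven labels: anger,
# disgust, fear, joy, neutral, sadness, and surprise.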
# Google Places API query function
def get_places_data(query, location, radius=5000, api_key=os.environ.get("GOOGLE_API_KEY", "")):
    params = {
        "query": query,
        "location": location,
        "radius": radius,
        "key": api_key
    }
    response = requests.get(url, params=params, timeout=10)
if response.status_code == 200:
data = response.json()
return data.get('results', [])
else:
return []
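# Usage sketch (assumes a valid key is exported as the GOOGLE_API_KEY env var):
#   results = get_places_data("therapist", "21.3,-157.8")
#   for place in results[:3]:
#       print(place.get("name"), place.get("formatted_address"))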
# Map generation function
def create_map(locations):
m = folium.Map(location=[21.3, -157.8], zoom_start=12)
for loc in locations:
name = loc.get("name", "No Name")
lat = loc['geometry']['location']['lat']
lng = loc['geometry']['location']['lng']
folium.Marker([lat, lng], popup=name).add_to(m)
return m._repr_html_() # Return HTML representation
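# The default center [21.3, -157.8] is roughly Honolulu; each result contributes
# one marker taken from its geometry.location lat/lng fields.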
# Sentiment analysis model (loaded once at import time rather than on every call)
sentiment_tokenizer = AutoTokenizer.from_pretrained("cardiffnlp/twitter-roberta-base-sentiment")
sentiment_model = AutoModelForSequenceClassification.from_pretrained("cardiffnlp/twitter-roberta-base-sentiment")

def analyze_sentiment(user_input):
    inputs = sentiment_tokenizer(user_input, return_tensors="pt")
    with torch.no_grad():
        outputs = sentiment_model(**inputs)
    predicted_class = torch.argmax(outputs.logits, dim=1).item()
    sentiment = ["Negative", "Neutral", "Positive"][predicted_class]
    return sentiment
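# The index order follows the cardiffnlp model card (0=negative, 1=neutral,
# 2=positive). Example: analyze_sentiment("I feel great today") -> "Positive".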
# Chatbot function for user interaction
def bag_of_words(s, words):
bag = [0 for _ in range(len(words))]
s_words = word_tokenize(s)
    # Stem every token; matching against the (already stemmed) vocabulary happens below
    s_words = [stemmer.stem(word.lower()) for word in s_words]
for se in s_words:
for i, w in enumerate(words):
if w == se:
bag[i] = 1
return np.array(bag)
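# Illustrative example, assuming the vocabulary words = ["feel", "sad", "help"]
# (already Lancaster-stemmed): bag_of_words("I feel so sad", words) -> array([1, 1, 0]),
# the fixed-length binary vector the tflearn model expects as input.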
def chatbot(message, history):
history = history or []
message = message.lower()
try:
results = model.predict([bag_of_words(message, words)])
results_index = np.argmax(results)
tag = labels[results_index]
for tg in data["intents"]:
if tg['tag'] == tag:
responses = tg['responses']
response = random.choice(responses)
break
else:
response = "I'm sorry, I didn't understand that. Could you please rephrase?"
except Exception as e:
response = f"An error occurred: {str(e)}"
history.append((message, response))
return history, history
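# chatbot() returns the updated history twice so Gradio can feed one copy to the
# Chatbot display and the other back into the "state" input on the next turn.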
# Emotion detection (pipeline built once from the preloaded model and tokenizer)
emotion_pipe = pipeline("text-classification", model=emotion_model, tokenizer=tokenizer)

def detect_emotion(user_input):
    result = emotion_pipe(user_input)
    emotion = result[0]['label']
    return emotion
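# Example: detect_emotion("I can't stop worrying about everything") would
# typically be classified as "fear" by this model (output is not guaranteed).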
# Scraping the website to extract phone number or email
def scrape_website_for_contact_info(website):
phone_number = "Not available"
email = "Not available"
try:
response = requests.get(website, timeout=5)
soup = BeautifulSoup(response.content, 'html.parser')
        # Match a phone-like run: optional "(" or "+", then at least 8 digits/separators
        phone_match = re.search(r'\(?\+?[0-9][0-9\-\(\) ]{6,}[0-9]', soup.get_text())
if phone_match:
phone_number = phone_match.group()
email_match = re.search(r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}', soup.get_text())
if email_match:
email = email_match.group()
except Exception as e:
print(f"Error scraping website {website}: {e}")
return phone_number, email
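# Usage sketch (requires network access; results depend entirely on page markup):
#   phone, email = scrape_website_for_contact_info("https://example.com")
#   print(phone, email)  # "Not available" is returned when nothing matches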
# Main Gradio interface for emotion detection and chatbot
def emotion_and_chatbot(user_input, history, query, location):
# Emotion Detection
emotion = detect_emotion(user_input)
sentiment = analyze_sentiment(user_input)
emotion_response = f"Emotion Detected: {emotion}. Sentiment: {sentiment}"
# Search Places (for wellness or other queries)
places_data = get_places_data(query, location)
places_map = create_map(places_data) if places_data else "No places found."
# Chatbot response
history, _ = chatbot(user_input, history)
return emotion_response, places_map, history, history
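# The four return values map positionally to the four outputs declared in the
# Interface below: emotion/sentiment text, map HTML, chatbot display, and state.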
# Gradio interface setup
iface = gr.Interface(
fn=emotion_and_chatbot,
inputs=[
gr.Textbox(label="Enter your message", placeholder="How are you feeling?"),
"state", # Chat history
gr.Textbox(label="Search Query (e.g. wellness)", placeholder="e.g. therapist"),
gr.Textbox(label="Location (latitude,longitude)", placeholder="e.g. 21.3,-157.8")
],
outputs=[
gr.Textbox(label="Emotion and Sentiment"),
gr.HTML(label="Places Map"),
gr.Chatbot(label="Chatbot History"),
"state"
],
title="Wellbeing Chatbot with Emotion Detection & Location Search",
description="A chatbot that provides mental health support, analyzes emotions, and helps find wellness professionals near you."
)
# Launch Gradio app
if __name__ == "__main__":
    iface.launch(debug=True)