import os
import gradio as gr
import nltk
import numpy as np
import tflearn
import random
import json
import pickle
from nltk.tokenize import word_tokenize
from nltk.stem.lancaster import LancasterStemmer
from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
import googlemaps
import folium
import torch

# Suppress TensorFlow GPU usage and warnings
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"

# Ensure necessary NLTK resources
nltk.download("punkt")

stemmer = LancasterStemmer()

# Load chatbot intents and training data
with open("intents.json") as file:
    intents_data = json.load(file)

with open("data.pickle", "rb") as f:
    words, labels, training, output = pickle.load(f)

# Build chatbot model
net = tflearn.input_data(shape=[None, len(training[0])])
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, len(output[0]), activation="softmax")
net = tflearn.regression(net)
chatbot_model = tflearn.DNN(net)
chatbot_model.load("MentalHealthChatBotmodel.tflearn")

# Hugging Face emotion and sentiment detection models
tokenizer_sentiment = AutoTokenizer.from_pretrained("cardiffnlp/twitter-roberta-base-sentiment")
model_sentiment = AutoModelForSequenceClassification.from_pretrained("cardiffnlp/twitter-roberta-base-sentiment")
tokenizer_emotion = AutoTokenizer.from_pretrained("j-hartmann/emotion-english-distilroberta-base")
model_emotion = AutoModelForSequenceClassification.from_pretrained("j-hartmann/emotion-english-distilroberta-base")

# Initialize Google Maps API client
gmaps = googlemaps.Client(key=os.getenv("GOOGLE_API_KEY"))

# Helper Functions
def bag_of_words(s, words):
    """Convert user input into a bag-of-words vector."""
    bag = [0] * len(words)
    s_words = word_tokenize(s)
    s_words = [stemmer.stem(word.lower()) for word in s_words if word.isalnum()]
    for se in s_words:
        for i, w in enumerate(words):
            if w == se:
                bag[i] = 1
    return np.array(bag)

# Chatbot response logic
def chatbot(message, history):
    """Generate chatbot response and update chat history."""
    history = history or []
    try:
        result = chatbot_model.predict([bag_of_words(message, words)])
        tag = labels[np.argmax(result)]
        response = "I'm sorry, I'm not sure how to respond. 🤔"
🤔" for intent in intents_data["intents"]: if intent["tag"] == tag: response = random.choice(intent["responses"]) break except Exception as e: response = f"Error: {e}" history.append((message, response)) # Append to the chatbot history return history, response # Sentiment detection function def analyze_sentiment(user_input): """Analyze sentiment and return emoji-mapped sentiment.""" inputs = tokenizer_sentiment(user_input, return_tensors="pt") with torch.no_grad(): outputs = model_sentiment(**inputs) sentiment_class = torch.argmax(outputs.logits, dim=1).item() sentiment_map = ["Negative 😔", "Neutral 😐", "Positive 😊"] return sentiment_map[sentiment_class] # Emotion detection function def detect_emotion(user_input): """Detect emotion from user input using Hugging Face emotion model.""" pipe = pipeline("text-classification", model=model_emotion, tokenizer=tokenizer_emotion) result = pipe(user_input) emotion = result[0]["label"].lower().strip() emotion_map = { "joy": "😊 Joy", "anger": "😠 Anger", "sadness": "😢 Sadness", "fear": "😨 Fear", "surprise": "😲 Surprise", "neutral": "😐 Neutral", } return emotion_map.get(emotion, "Unknown 🤔") # Generate suggestions based on emotion def generate_suggestions(emotion): """Generate resources and videos to help based on the emotion detected.""" emotion_key = emotion.lower() suggestions = { "joy": [ ["Relaxation Techniques", 'Visit'], ["Emotional Toolkit", 'Visit'], ["Stress Management", 'Visit'], ], "anger": [ ["Calming Techniques", 'Watch'], ["Manage Anger", 'Visit'], ], "fear": [ ["Coping with Anxiety", 'Visit'], ["Mindfulness Meditation", 'Watch'], ], "sadness": [ ["Overcoming Sadness", 'Watch'], ], "surprise": [ ["Managing Surprises", 'Visit'], ["Relaxation Video", 'Watch'], ], "neutral": [ ["General Tips", 'Read More'] ], } return suggestions.get(emotion_key, [["No specific suggestions available.", ""]]) # Google Maps integration def get_health_professionals_and_map(location, query): """Search nearby health professionals and generate map.""" try: if not location or not query: return ["Please provide a valid location and query."], "" geo_location = gmaps.geocode(location) if geo_location: lat, lng = geo_location[0]["geometry"]["location"].values() places_result = gmaps.places_nearby(location=(lat, lng), radius=10000, keyword=query)["results"] professionals = [] map_ = folium.Map(location=(lat, lng), zoom_start=13) for place in places_result: professionals.append(f"{place['name']} - {place.get('vicinity', 'No address available')}") folium.Marker( location=[place["geometry"]["location"]["lat"], place["geometry"]["location"]["lng"]], popup=f"{place['name']}" ).add_to(map_) return professionals, map_._repr_html_() return ["No professionals found."], "" except Exception as e: return [f"Error: {e}"], "" # Main application logic def app_function(user_message, location, query, history): chatbot_history, _ = chatbot(user_message, history) sentiment = analyze_sentiment(user_message) # Sentiment detection emotion = detect_emotion(user_message) # Emotion detection suggestions = generate_suggestions(emotion) # Get emotion-based suggestions professionals, map_html = get_health_professionals_and_map(location, query) # Google Maps integration return chatbot_history, sentiment, emotion, suggestions, professionals, map_html # Custom CSS custom_css = """ body { background: linear-gradient(135deg, #000, #ff5722); font-family: 'Roboto', sans-serif; color: white; } h1 { font-size: 4.5rem; font-weight: bold; text-align: center; color: white; text-shadow: 2px 2px 8px rgba(0, 0, 
h2 {
    font-size: 2rem;
    text-align: center;
    font-weight: lighter;
    color: white;
    margin-bottom: 30px;
}
.button {
    background: linear-gradient(45deg, #ff5722, #ff9800) !important;
    border: none !important;
    padding: 12px 20px;
    border-radius: 8px;
    color: white !important;
    cursor: pointer;
    font-size: 16px;
}
"""

# Gradio Application
with gr.Blocks(css=custom_css) as app:
    gr.HTML("