import gradio as gr
import openai
import os

# Point the OpenAI client at Groq's OpenAI-compatible endpoint.
# The API key is read from the TRY_NEW_THINGS environment variable.
openai.api_key = os.getenv("TRY_NEW_THINGS")
openai.api_base = "https://api.groq.com/openai/v1"
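
# Note: this script uses the pre-1.0 openai-python interface (openai.api_base /
# openai.ChatCompletion). A rough sketch of the same call with openai>=1.0,
# assuming the same key and endpoint, would be:
#
#     from openai import OpenAI
#     client = OpenAI(api_key=os.getenv("TRY_NEW_THINGS"),
#                     base_url="https://api.groq.com/openai/v1")
#     client.chat.completions.create(model="llama-3.1-70b-versatile", messages=[...])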
# Function to get a response from the Groq API
def get_groq_response(message, category):
    """Return the model's reply to `message`, steered by a category-specific system prompt."""
    system_message = ""
    if category == "Stress Management":
        system_message = "Provide soothing advice and tips to help the user manage stress. Be calm and empathetic."
    elif category == "Career Advice":
        system_message = "Provide professional and constructive career advice. Be encouraging and helpful."
    elif category == "General":
        system_message = "Provide general conversation. Be friendly and easygoing."
    elif category == "Friendly Buddy":
        system_message = "Respond as a supportive and fun friend. Be informal and light-hearted."

    try:
        response = openai.ChatCompletion.create(
            model="llama-3.1-70b-versatile",
            messages=[
                {"role": "system", "content": system_message},
                {"role": "user", "content": message}
            ]
        )
        return response.choices[0].message["content"]
    except Exception as e:
        return f"Error: {str(e)}"
# Chatbot function
def chatbot(user_input, category, history=None):
    # Avoid a mutable default argument; history is a list of (user, bot) tuples,
    # the format gr.Chatbot expects.
    history = history or []
    bot_response = get_groq_response(user_input, category)
    history.append((f"You: {user_input}", f"Bot: {bot_response}"))
    return history, history
# Gradio Interface with enhanced styling
chat_interface = gr.Blocks(css="""
body {
    font-family: 'Poppins', sans-serif;
    background: linear-gradient(120deg, #ff9a9e, #fad0c4, #a1c4fd);
    animation: gradientBG 10s ease infinite;
    margin: 0;
    padding: 0;
    color: #333;
}
@keyframes gradientBG {
    0% { background-position: 0% 50%; }
    50% { background-position: 100% 50%; }
    100% { background-position: 0% 50%; }
}
button {
    background: linear-gradient(90deg, #98c1d9, #ee9ca7);
    color: white;
    padding: 0.8rem 1.5rem;
    font-size: 1rem;
    font-weight: bold;
    border-radius: 20px;
    border: none;
    cursor: pointer;
    transition: transform 0.2s ease, background 0.2s ease;
}
button:hover {
    background: linear-gradient(90deg, #ee9ca7, #98c1d9);
    transform: scale(1.1);
}
header {
    text-align: center;
    margin-bottom: 20px;
    padding: 10px;
    border-radius: 15px;
    background: linear-gradient(90deg, #ff758c, #ff7eb3);
    color: white;
    box-shadow: 0 4px 15px rgba(0, 0, 0, 0.2);
}
.chat-container {
    border: 2px solid #ff7eb3;
    background: rgba(255, 255, 255, 0.8);
    border-radius: 15px;
    padding: 15px;
    box-shadow: 0 4px 10px rgba(0, 0, 0, 0.1);
    max-height: 300px;
    overflow-y: auto;
}
""")
with chat_interface:
    with gr.Row():
        gr.Markdown("<h1 style='text-align:center;'>Vibrant Personal Assistant Chatbot</h1>")
    with gr.Row():
        gr.Markdown("<p style='text-align:center;'>Select a category and type your message to get tailored responses.</p>")
    with gr.Row():
        user_input = gr.Textbox(label="Your Message", placeholder="Type something...", lines=2)
        category_dropdown = gr.Dropdown(
            choices=["Stress Management", "Career Advice", "General", "Friendly Buddy"],
            label="Choose Chat Category"
        )
    with gr.Row():
        send_button = gr.Button("Send")
    with gr.Row():
        chatbot_output = gr.Chatbot(label="Chat History")
        copy_button = gr.Button("Copy Response")
    # Add functionality to handle interactions
    def handle_chat(user_input, category, history):
        history = history or []  # The Chatbot value may be None or empty before the first message
        if not user_input.strip():
            return history, history
        updated_history, _ = chatbot(user_input, category, history)
        return updated_history, updated_history

    send_button.click(
        handle_chat,
        inputs=[user_input, category_dropdown, chatbot_output],
        outputs=[chatbot_output, chatbot_output]
    )
    # Copy functionality
    def copy_last_response(history):
        if history:
            return history[-1][1]  # The last bot response
        return "No response to copy."

    # No output component is wired up, so the returned text is not displayed in the UI.
    copy_button.click(
        copy_last_response,
        inputs=[chatbot_output],
        outputs=[]
    )

chat_interface.launch()
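
# To try this locally (a sketch, assuming the script is saved as app.py and that a
# pre-1.0 release of the openai package is installed, since openai.ChatCompletion
# was removed in openai>=1.0):
#
#     pip install gradio "openai<1.0"
#     export TRY_NEW_THINGS="<your Groq API key>"
#     python app.py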