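"""Fashion Assistant Chatbot.

A Gradio app that collects a short user questionnaire and chats with the user
through the Groq chat-completions API, acting as a fashion advisor.
"""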
import os

import gradio as gr
import pandas as pd
from groq import Groq

# Initialize the Groq client; read the API key from the environment instead of
# hard-coding it in the source
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))

# Define the system message for the model
system_message = {
    "role": "system",
    "content": (
        "You are an experienced fashion designer who provides valuable fashion "
        "advice, asks relevant questions, and keeps the conversation focused on "
        "the user's style and preferences."
    ),
}

# Function to reset the chat
def reset_chat():
    return [], "New Chat"

# Function to handle questionnaire submission
def submit_questionnaire(name, age, location, gender):
    # Store the questionnaire responses in a single-row DataFrame
    questionnaire_data = {
        "Name": name,
        "Age": age,
        "Gender": gender,
        "Location": location,
    }
    df = pd.DataFrame([questionnaire_data])
    # Append to the CSV file, writing the header only if the file does not exist yet
    df.to_csv(
        "questionnaire_responses.csv",
        mode="a",
        header=not os.path.exists("questionnaire_responses.csv"),
        index=False,
    )
    return "Thank you for completing the questionnaire!"

# Function to handle chat
def chat(user_input, messages, name, age, location, gender):
    messages = messages or []
    if not user_input:
        return messages, ""
    # Short profile string so the model can personalise its advice
    user_profile_string = (
        f"User profile: Name: {name}, Age: {age}, Location: {location}, "
        f"Gender: {gender}"
    )
    # Add the new user message to the visible chat history
    messages.append({"role": "user", "content": user_input})
    # Build the API payload: system prompt and profile first, then the
    # conversation history reduced to plain role/content pairs so the
    # profile and system prompt never appear in the chat window
    history = [{"role": m["role"], "content": m["content"]} for m in messages]
    api_messages = [system_message, {"role": "user", "content": user_profile_string}] + history
    try:
        # Generate a response from the Groq API
        completion = client.chat.completions.create(
            model="llama3-8b-8192",
            messages=api_messages,
            temperature=1,
            max_tokens=1024,
            top_p=1,
            stream=False,
        )
        # Make sure the response contains at least one choice
        if completion.choices and len(completion.choices) > 0:
            response_content = completion.choices[0].message.content
        else:
            response_content = "Sorry, I couldn't generate a response."
    except Exception as e:
        response_content = f"Error: {str(e)}"
    # Store the assistant response in the chat history
    messages.append({"role": "assistant", "content": response_content})
    # Return the updated history and clear the input box
    return messages, ""

# Gradio Interface
with gr.Blocks() as demo:
    gr.Markdown("## Fashion Assistant Chatbot")

    # Sidebar for user inputs
    with gr.Row():
        with gr.Column():
            name = gr.Textbox(label="Name")
            age = gr.Number(label="Age", value=25, minimum=1, maximum=100)
            location = gr.Textbox(label="Location")
            gender = gr.Radio(label="Gender", choices=["Male", "Female", "Other"])
            submit_btn = gr.Button("Submit Questionnaire")
            reset_btn = gr.Button("Reset Chat")

    # Chat functionality
    chatbox = gr.Chatbot(type="messages")
    user_input = gr.Textbox(label="Your Message", placeholder="Type your message here...")

    # Output message for feedback
    output_message = gr.Textbox(label="Output Message", interactive=False)

    # Connect the buttons and the message box to their respective functions
    submit_btn.click(submit_questionnaire, inputs=[name, age, location, gender], outputs=output_message)
    reset_btn.click(reset_chat, outputs=[chatbox, output_message])
    user_input.submit(chat, inputs=[user_input, chatbox, name, age, location, gender], outputs=[chatbox, user_input])

# Run the app
demo.launch()