import os

import gradio as gr
import openai
# Set up OpenAI API with custom Groq endpoint
openai.api_key = os.getenv("PrepBuddy_API_KEY")
openai.api_base = "https://api.groq.com/openai/v1"
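# NOTE: this relies on the legacy openai<1.0 module-level interface
# (openai.api_base / openai.ChatCompletion); with openai>=1.0 the equivalent
# would be an OpenAI(base_url=..., api_key=...) client instead.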
# Function to get the Groq model's response
def get_groq_response(message, mode):
    try:
        # Use a system prompt tailored to the selected mode
        motivational_message = (
            "Keep pushing forward! You've got this. Programming might seem tough at first, but every step you take "
            "is one step closer to mastering it. Let's score 70 on 70 in programming together!"
        )
        if mode == "Code":
            system_prompt = (
                "You are GS C PrepBuddy, a friendly and motivational AI specializing in C programming. "
                "Your goal is to assist users by providing clear, concise, and well-commented C code "
                "to solve their problems. Explain the logic where necessary. "
                + motivational_message
            )
        elif mode == "Flowchart":
            system_prompt = (
                "You are GS C PrepBuddy, a friendly and motivational AI specializing in C programming. "
                "Provide a detailed textual description of flowcharts to represent solutions for C programming problems. "
                + motivational_message
            )
        elif mode == "Algorithm":
            system_prompt = (
                "You are GS C PrepBuddy, a friendly and motivational AI specializing in C programming. "
                "Provide step-by-step algorithms to solve the user's query with clarity. "
                + motivational_message
            )
        elif mode == "Exam Preparation":
            system_prompt = (
                "You are GS C PrepBuddy, a friendly and motivational AI dedicated to helping students prepare for their exams. "
                "Provide explanations, theoretical concepts, sample questions, and problem-solving techniques in C programming. "
                + motivational_message
            )
        else:
            system_prompt = "You are GS C PrepBuddy, a helpful and motivational AI assistant."

        # Call the chat completion endpoint with the mode-specific system prompt
        response = openai.ChatCompletion.create(
            model="llama-3.1-70b-versatile",
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": message}
            ]
        )
        return response.choices[0].message["content"]
    except Exception as e:
        return f"Error: {str(e)}"
# Function to handle chatbot interactions
def chatbot(user_input, output_format, history=None):
    # Avoid a mutable default argument; start a fresh history when none is passed
    if history is None:
        history = []
    try:
        # Get the response based on the selected output format
        bot_response = get_groq_response(user_input, output_format)
        # Append to the conversation history
        history.append((user_input, bot_response))
        return history, history  # Return updated chat history and state
    except Exception as e:
        return [(user_input, f"Error: {str(e)}")], history
# Gradio Interface setup
chat_interface = gr.Interface(
    fn=chatbot,  # Function to call for each chatbot interaction
    inputs=[
        "text",  # User input
        gr.Dropdown(
            ["Code", "Flowchart", "Algorithm", "Exam Preparation"],
            label="Output Format",
            value="Code"
        ),
        "state"  # Chat history
    ],
    outputs=["chatbot", "state"],  # Outputs: the chat and updated history (state)
    live=False,  # Disable live updates; responses are shown after submit
    title="GS C PrepBuddy",  # Title of the app
    description=(
        "Welcome to GS Programming PrepBuddy! 💻✨\n\n"
        "Let's make programming fun and score 70 on 70 in your C programming exams!\n\n"
        "Choose your output format—Code, Flowchart, Algorithm, or Exam Preparation.\n\n"
        "Ask your query, and I'll guide you every step of the way!\n\n"
        "Made by Satyam Singhal"
    )
)
# Launch the Gradio interface
if __name__ == "__main__":
    chat_interface.launch()
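# Example (assumed) local run: export a Groq API key as PrepBuddy_API_KEY,
# then run this file (e.g. `python app.py`) and open the local URL Gradio prints.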