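"""Gradio chat UI for the AI-Guided Math PD Chatbot, backed by the OpenAI chat completions API."""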
import os
import gradio as gr
from dotenv import load_dotenv
from openai import OpenAI
from prompts.main_prompt import MAIN_PROMPT
from prompts.initial_prompt import INITIAL_PROMPT
# ✅ Load API key from .env file
if os.path.exists(".env"):
    load_dotenv(".env")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
if not OPENAI_API_KEY:
    raise ValueError("🚨 OpenAI API key is missing! Set it in your .env file.")
client = OpenAI(api_key=OPENAI_API_KEY)
def respond(user_message, history):
    if not user_message:
        return "", history

    # ✅ Ensure proper message handling
    try:
        # Rebuild the full conversation: system prompt, prior turns, then the new message
        messages = [{"role": "system", "content": MAIN_PROMPT}]
        for user_text, assistant_text in history:
            if user_text:
                messages.append({"role": "user", "content": user_text})
            if assistant_text:
                messages.append({"role": "assistant", "content": assistant_text})
        messages.append({"role": "user", "content": user_message})

        # ✅ Get AI response
        completion = client.chat.completions.create(
            model="gpt-4o",
            messages=messages,
            max_tokens=512,
            temperature=0.7
        )
        assistant_reply = completion.choices[0].message.content

        history.append((user_message, assistant_reply))
        return "", history

    except Exception as e:
        # Surface the error in the chat window instead of the input box
        history.append((user_message, f"⚠️ An error occurred: {str(e)}"))
        return "", history
# ✅ Gradio UI Setup
with gr.Blocks() as demo:
    gr.Markdown("## 🤖 AI-Guided Math PD Chatbot")

    # Chat window seeded with the initial prompt as the assistant's opening message
    chatbot = gr.Chatbot(value=[("", INITIAL_PROMPT)], height=500)
    state_history = gr.State([("", INITIAL_PROMPT)])

    user_input = gr.Textbox(placeholder="Type your message here...", label="Your Input")

    # On submit: clear the textbox and update the chat, then sync the chat back into state
    user_input.submit(
        respond,
        inputs=[user_input, state_history],
        outputs=[user_input, chatbot]
    ).then(
        fn=lambda h: h,
        inputs=[chatbot],
        outputs=[state_history]
    )
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)