import os
import gradio as gr
from openai import OpenAI
from prompts.main_prompt import MAIN_PROMPT
from prompts.initial_prompt import INITIAL_PROMPT
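# NOTE: assumes a local `prompts` package in which main_prompt.py defines MAIN_PROMPT
# (the system prompt string) and initial_prompt.py defines INITIAL_PROMPT (the greeting
# shown when the chat first loads).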
# ✅ Load API Key
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
if not OPENAI_API_KEY:
    raise ValueError("⚠️ Missing OpenAI API Key! Set it in Hugging Face 'Settings' → 'Secrets'.")
client = OpenAI(api_key=OPENAI_API_KEY)
# ✅ Ensure AI asks users first & only helps if needed
def respond(user_message, history):
    if not user_message:
        return "", history
    # ✅ AI waits for user answers before solving
    # Rebuild the conversation: each (user, assistant) pair in history becomes two
    # messages; empty entries (e.g. the seeded introduction turn) are skipped.
    messages = [{"role": "system", "content": MAIN_PROMPT}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": user_message})
    try:
        assistant_reply = client.chat.completions.create(
            model="gpt-4o",
            messages=messages,
            max_tokens=300,  # ✅ Prevents cutting off messages
            temperature=0.7,
        ).choices[0].message.content
    except Exception as e:
        assistant_reply = f"⚠️ Error: {str(e)}"
    history.append((user_message, assistant_reply))
    return "", history
# ✅ Gradio UI: start the chat with the introduction message already displayed
with gr.Blocks() as demo:
    gr.Markdown("# **AI-Guided Math PD Chatbot**")
    chatbot = gr.Chatbot(
        value=[("", INITIAL_PROMPT)],  # ✅ Starts with an introduction message
        height=500
    )
    state_history = gr.State([("", INITIAL_PROMPT)])  # ✅ Ensures step-by-step history
    user_input = gr.Textbox(placeholder="Type your message here...", label="Your Input")
    user_input.submit(
        respond,
        inputs=[user_input, state_history],
        outputs=[user_input, chatbot]
    ).then(
        fn=lambda _, h: h,  # ✅ Mirror the updated chatbot history back into the State
        inputs=[user_input, chatbot],
        outputs=[state_history]
    )
if __name__ == "__main__":
    demo.launch()
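
# Usage (a sketch, assuming local testing outside Hugging Face Spaces):
#   export OPENAI_API_KEY=sk-...   # any valid OpenAI API key
#   python app.py
# On Spaces, the key is instead read from the repository's Settings → Secrets.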