import os
import gradio as gr
from dotenv import load_dotenv
from openai import OpenAI
from prompts.initial_prompt import INITIAL_PROMPT
from prompts.main_prompt import TASK_PROMPT


# Load the OpenAI API key from .env file
if os.path.exists(".env"):
    load_dotenv(".env")

OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
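
# Optional guard: fail fast if the API key is missing (assumes every non-canned
# response needs the OpenAI fallback call below).
if not OPENAI_API_KEY:
    raise ValueError("OPENAI_API_KEY is not set. Add it to .env or the environment.")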

client = OpenAI(api_key=OPENAI_API_KEY)


def gpt_call(history, user_message,
             model="gpt-4o-mini",
             max_tokens=512,
             temperature=0.7,
             top_p=0.95):
    """
    Calls OpenAI's Chat Completions API to generate responses.
    - history: [(user_text, assistant_text), ...]
    - user_message: User's latest input
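
    Illustrative calls (inputs are examples only):
        gpt_call([], "I tried a bar model")   -> canned bar-model follow-up, no API call
        gpt_call([], "Here is my solution")   -> falls through to the OpenAI API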
    """
    # System message (TASK_PROMPT) at the beginning
    messages = [{"role": "system", "content": TASK_PROMPT}]
    
    # Convert history into OpenAI format
    for user_text, assistant_text in history:
        if user_text:
            messages.append({"role": "user", "content": user_text})
        if assistant_text:
            messages.append({"role": "assistant", "content": assistant_text})

    # Add the latest user input
    messages.append({"role": "user", "content": user_message})

    # AI-controlled gradual guidance
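    # If the teacher names a specific representation (bar model, double number line,
    # ratio table, graph), return a canned scaffolding question and skip the API call;
    # otherwise fall through to the model below.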
    if "bar model" in user_message.lower():
        return "Great! You've started using a bar model. Can you explain how you divided it? What does each section represent?"
    
    elif "double number line" in user_message.lower():
        return "Nice! How does your number line show the relationship between time and distance? Did you mark the correct intervals?"
    
    elif "ratio table" in user_message.lower():
        return "Good choice! Before I check, how did you determine the ratio for 1 hour?"
    
    elif "graph" in user_message.lower():
        return "Graphs are powerful! What key points did you plot, and why?"
    
    else:
        # OpenAI API call (fallback response)
        completion = client.chat.completions.create(
            model=model,
            messages=messages,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p
        )
        return completion.choices[0].message.content


def respond(user_message, history):
    """
    Handles user input and chatbot response in Gradio.
    - user_message: The latest input from the user.
    - history: A list of (user, assistant) message pairs.
    """
    if not user_message:
        return "", history

    # Generate AI response
    assistant_reply = gpt_call(history, user_message)

    # Append to history
    history.append((user_message, assistant_reply))

    # Return the updated history and clear the input box
    return "", history


##############################
#  Gradio Chatbot UI
##############################
with gr.Blocks() as demo:
    gr.Markdown("## AI-Guided Teacher PD Chatbot")

    # Initial chatbot message (starts with the task)
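    # Each chatbot entry is a (user, assistant) pair; None on the user side hides that bubble.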
    chatbot = gr.Chatbot(
        value=[("", INITIAL_PROMPT)],
        height=500
    )

    # Chat history state
    state_history = gr.State([(None, INITIAL_PROMPT)])

    # User input box
    user_input = gr.Textbox(
        placeholder="Type your response here...",
        label="Your Input"
    )

    # When the user submits input → respond() clears the textbox and updates the chatbot;
    # the follow-up .then() copies the displayed chat history back into state_history.
    user_input.submit(
        respond,
        inputs=[user_input, state_history],
        outputs=[user_input, chatbot]
    ).then(
        fn=lambda _, h: h,
        inputs=[user_input, chatbot],
        outputs=[state_history]
    )

# Launch the chatbot
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860, share=True)