import os
import gradio as gr
from dotenv import load_dotenv
from openai import OpenAI
from prompts.initial_prompt import INITIAL_PROMPT
from prompts.main_prompt import (
    MAIN_PROMPT,
    get_prompt_for_problem,
    get_ccss_practice_standards,
    get_problem_posing_task,
    get_creativity_discussion,
    get_summary,
)

# Load API key from .env file
if os.path.exists(".env"):
    load_dotenv(".env")

OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
client = OpenAI(api_key=OPENAI_API_KEY)
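
# Optional sanity check (a minimal sketch, not part of the original flow):
# fail fast with a clear message if the key is missing, rather than letting
# the first API call fail with an authentication error.
if not OPENAI_API_KEY:
    raise RuntimeError(
        "OPENAI_API_KEY is not set. Add it to a .env file or export it "
        "before starting the app."
    )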

def gpt_call(history, user_message, model="gpt-4o-mini", max_tokens=512, temperature=0.7, top_p=0.95):
    """
    Calls OpenAI Chat API to generate responses.
    - history: [(user_text, assistant_text), ...]
    - user_message: latest message from user
    """
    messages = [{"role": "system", "content": MAIN_PROMPT}]

    # Add history to conversation
    for user_text, assistant_text in history:
        if user_text:
            messages.append({"role": "user", "content": user_text})
        if assistant_text:
            messages.append({"role": "assistant", "content": assistant_text})

    messages.append({"role": "user", "content": user_message})

    completion = client.chat.completions.create(
        model=model,
        messages=messages,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p
    )

    return completion.choices[0].message.content
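
# Illustrative usage only (assumes a valid OPENAI_API_KEY and network access;
# not executed by the app):
#
#   reply = gpt_call(
#       history=[("Hi", "Hello! Which problem would you like to explore?")],
#       user_message="Let's start with problem 1",
#   )
#   print(reply)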

def respond(user_message, history):
    """
    Handles user input and chatbot responses.
    - user_message: latest user input
    - history: previous chat history
    """
    if not user_message:
        return "", history

    # If user selects a problem number, redirect to the appropriate prompt
    if user_message.strip() in ["1", "2", "3"]:
        assistant_reply = get_prompt_for_problem(user_message.strip())

    # If the user types "common core", move to the CCSS Practice Standards reflection
    elif user_message.lower().strip() == "common core":
        assistant_reply = get_ccss_practice_standards()

    # If the user types "problem posing", ask them to create a new problem
    elif user_message.lower().strip() == "problem posing":
        assistant_reply = get_problem_posing_task()

    # If the user types "creativity", ask for their thoughts on creativity
    elif user_message.lower().strip() == "creativity":
        assistant_reply = get_creativity_discussion()

    # If user requests a summary, provide the final learning summary
    elif user_message.lower().strip() == "summary":
        assistant_reply = get_summary()

    else:
        # Continue conversation normally with AI guidance
        assistant_reply = gpt_call(history, user_message)

    # Update history
    history.append((user_message, assistant_reply))
    return "", history

##############################
#  Gradio UI Setup
##############################
with gr.Blocks() as demo:
    gr.Markdown("## AI-Guided Math PD Chatbot")

    # Initialize chatbot with first message
    chatbot = gr.Chatbot(
        value=[("", INITIAL_PROMPT)],  # Initial system message
        height=500
    )

    # Maintain chat history state
    state_history = gr.State([("", INITIAL_PROMPT)])

    # User input box
    user_input = gr.Textbox(
        placeholder="Type your message here...",
        label="Your Input"
    )

    # Send the message when the user presses Enter in the textbox
    user_input.submit(
        respond,
        inputs=[user_input, state_history],
        outputs=[user_input, chatbot]
    ).then(
        # Keep the stored history in sync with what the chatbot now displays
        fn=lambda h: h,
        inputs=[chatbot],
        outputs=[state_history]
    )

# Launch app
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860, share=True)