import os
import gradio as gr
import pandas as pd
from functools import partial

from ai_classroom_suite.UIBaseComponents import *


### User Interface Chatbot Functions ###

def get_tutor_reply(chat_tutor):
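    """
    Ask the tutor to generate its next reply and return updated values for the UI.

    The return values map positionally to the event outputs
    [user_chat_input, chatbot, study_tutor]: a gr.update that clears and re-enables
    the input textbox, the updated conversation memory for the Chatbot component,
    and the tutor state itself.
    """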
    chat_tutor.get_tutor_reply()
    return gr.update(value="", interactive=True), chat_tutor.conversation_memory, chat_tutor

def get_conversation_history(chat_tutor):
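    """Return the tutor's stored conversation memory (for the JSON viewer) and the tutor state."""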
    return chat_tutor.conversation_memory, chat_tutor


### Instructor Interface Helper Functions ###

def get_instructor_prompt(fileobj):
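    """
    Read the uploaded prompt file and return its text.

    With type="file", Gradio passes a tempfile-like object whose .name attribute
    is the path of the uploaded copy on disk (Gradio 3.x behavior).
    """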
    # get file path
    file_path = fileobj.name
    with open(file_path, "r") as f:
        instructor_prompt = f.read()
    return instructor_prompt

def embed_prompt(prompt, chat_tutor):
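    """
    Store the secret prompt for this session and attach it to the tutor as its
    learning objectives.

    Note: assigning to os.environ only affects the running process; it does not
    change the Space's stored secrets (see Settings -> Variables and secrets).
    """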
    # update secret
    os.environ["SECRET_PROMPT"] = prompt
    # update tutor
    chat_tutor.learning_objectives = prompt
    return os.environ.get("SECRET_PROMPT"), chat_tutor


### User Interfaces ###

with gr.Blocks() as demo:
    # initialize tutor (with state)
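    # gr.State keeps a separate tutor instance per browser session, so one
    # student's conversation does not leak into another student's session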
    study_tutor = gr.State(SlightlyDelusionalTutor())

    # Student interface
    with gr.Tab("For Students"):
        # Chatbot interface
        gr.Markdown("""
        ## Chat with the Model
        Description here
        """)
        with gr.Row(equal_height=True):
            with gr.Column(scale=2):
                chatbot = gr.Chatbot()
                with gr.Row():
                    user_chat_input = gr.Textbox(label="User input", scale=9)
                    user_chat_submit = gr.Button("Ask/answer model", scale=1)

        # First add the user's message to the conversation history,
        # then get the reply from the tutor and add that to the conversation history
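        # queue=False lets the quick echo of the user's message bypass the request queue,
        # while the slower tutor/LLM call in get_tutor_reply runs through the queue
        # enabled by demo.queue() below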
        user_chat_submit.click(
            fn=add_user_message, inputs=[user_chat_input, study_tutor], outputs=[user_chat_input, chatbot, study_tutor], queue=False
        ).then(
            fn=get_tutor_reply, inputs=[study_tutor], outputs=[user_chat_input, chatbot, study_tutor], queue=True
        )

        # Testing the chat history storage, can be deleted at deployment
        with gr.Blocks():
            test_btn = gr.Button("View your chat history")
            chat_history = gr.JSON(label="conversation history")
            test_btn.click(get_conversation_history, inputs=[study_tutor], outputs=[chat_history, study_tutor])

        # Download conversation history file
        with gr.Blocks():
            gr.Markdown("""
            ## Export Your Chat History
            Export your chat history as a .json, .txt, or .csv file
            """)
            with gr.Row():
                export_dialogue_button_json = gr.Button("JSON")
                export_dialogue_button_txt = gr.Button("TXT")
                export_dialogue_button_csv = gr.Button("CSV")
                file_download = gr.Files(label="Download here", file_types=['.json', '.txt', '.csv'], type="file", visible=False)
            export_dialogue_button_json.click(save_json, study_tutor, file_download, show_progress=True)
            export_dialogue_button_txt.click(save_txt, study_tutor, file_download, show_progress=True)
            export_dialogue_button_csv.click(save_csv, study_tutor, file_download, show_progress=True)

    # Instructor interface
    with gr.Tab("Instructor Only"):
        """
        API Authentication functionality
        Instead of asking students to provide a key, the key is now provided by the instructor.
        To permanently set the key, go to Settings -> Variables and secrets -> Secrets,
        then replace the OPENAI_API_KEY value with the instructor's OpenAI key.
        """
        api_input = gr.Textbox(show_label=False, type="password", visible=False, value=os.environ.get("OPENAI_API_KEY"))

        # Upload secret prompt functionality
        # The instructor will provide a secret prompt/persona to the tutor
        with gr.Blocks():
            # testing purpose, change visible to False at deployment
            view_secret = gr.Textbox(label="Current secret prompt", value=os.environ.get("SECRET_PROMPT"), visible=False)
            # Prompt the instructor to upload the secret file
            file_input = gr.File(label="Load a .txt or .py file", file_types=['.py', '.txt'], type="file", elem_classes="short-height")
            # Verify prompt content
            instructor_prompt = gr.Textbox(label="Verify your prompt content", visible=True)
            file_input.upload(fn=get_instructor_prompt, inputs=file_input, outputs=instructor_prompt)

            # Placeholder components
            text_input_none = gr.Textbox(visible=False)
            file_input_none = gr.File(visible=False)
            instructor_input_none = gr.TextArea(visible=False)
            learning_objectives_none = gr.Textbox(visible=False)

            # Set the secret prompt in this session and embed it into the study tutor
            prompt_submit_btn = gr.Button("Submit")
            prompt_submit_btn.click(
                fn=embed_prompt, inputs=[instructor_prompt, study_tutor], outputs=[view_secret, study_tutor]
            ).then(
                fn=create_reference_store,
                inputs=[study_tutor, prompt_submit_btn, instructor_prompt, file_input_none, instructor_input_none, api_input, instructor_prompt],
                outputs=[study_tutor, prompt_submit_btn]
            )
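            # Note: create_reference_store (imported from UIBaseComponents) stores the prompt
            # text in the tutor's vector store, so the model sees it as retrieval context
            # rather than as a true system persona (see the TODOs below)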

        # TODO: The instructor prompt is currently only set for the session unless it is
        # added under Settings -> Variables and secrets. To "permanently" set the secret
        # prompt without it being visible to students who use this Space, one possible
        # way is to recreate the instructor interface in another Space and load it here
        # to chain with the student interface.
        # TODO: Currently, the instructor prompt is handled as text input and stored in the
        # vector store (and in the learning objectives), which means the tutor is still a
        # question-answering tutor that treats the prompt as context (rather than really
        # acting on it). We need to find a way to provide the prompt directly to the model
        # and set its persona/status.
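        # One possible direction (a sketch only; the attribute name below is hypothetical
        # and depends on how SlightlyDelusionalTutor wraps its LLM): expose a system-message
        # or persona field on the tutor and set it inside embed_prompt, e.g.
        #     chat_tutor.system_prompt = prompt  # hypothetical attribute, not part of the current API
        # so the prompt steers the model's behavior instead of only appearing as retrieval context.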

demo.queue().launch(server_name='0.0.0.0', server_port=7860)