import os
import gradio as gr
import pandas as pd
from functools import partial

from ai_classroom_suite.UIBaseComponents import *


# Conversation history is a list of [user_input_str, bot_response_str] pairs.
class EchoingTutor(SlightlyDelusionalTutor):
    """Minimal tutor that echoes the user's message back.

    Useful for exercising the chat UI without calling a real model.
    Maintains two views of the conversation: `conversation_memory`
    (list of [user, bot] pairs, the format gr.Chatbot expects) and
    `flattened_conversation` (a single transcript string).
    """

    def add_user_message(self, user_message):
        """Open a new turn: record the user message with a pending (None) reply."""
        self.conversation_memory.append([user_message, None])
        self.flattened_conversation = (
            self.flattened_conversation + '\n\n' + 'User: ' + user_message
        )

    def get_tutor_reply(self, user_message):
        """Produce the echo reply and complete the turn opened by add_user_message()."""
        tutor_message = "You said: " + user_message
        # Fill in the placeholder slot of the most recent turn.
        self.conversation_memory[-1][1] = tutor_message
        self.flattened_conversation = (
            self.flattened_conversation + '\nAI: ' + tutor_message
        )

    def forget_conversation(self):
        """Reset both the structured and the flattened conversation state."""
        self.conversation_memory = []
        self.flattened_conversation = ''


### Chatbot Functions ###

def add_user_message(user_message, chat_tutor):
    """Display user message and update chat history to include it."""
    chat_tutor.add_user_message(user_message)
    return chat_tutor.conversation_memory, chat_tutor


def get_tutor_reply(user_message, chat_tutor):
    """Generate the tutor reply, clear/re-enable the input box, refresh the chat."""
    chat_tutor.get_tutor_reply(user_message)
    return gr.update(value="", interactive=True), chat_tutor.conversation_memory, chat_tutor


def get_conversation_history(chat_tutor):
    """Expose the raw conversation memory (used by the debug JSON viewer)."""
    return chat_tutor.conversation_memory, chat_tutor


with gr.Blocks() as demo:
    # Initialize tutor (held in per-session gr.State).
    study_tutor = gr.State(EchoingTutor())

    # Instead of asking students to provide a key, the key is now directly
    # provided by the instructor through the environment.
    embed_key(os.environ.get("OPENAI_API_KEY"), study_tutor)

    # Reference document functionality (building vector stores)
    with gr.Box():
        gr.Markdown("### Add Reference Documents")
        # TODO Add entry for path to vector store (should be disabled for now)
        with gr.Row(equal_height=True):
            text_input = gr.TextArea(label='Copy and paste your text below',
                                     lines=2)
            file_input = gr.Files(label="Load a .txt or .pdf file",
                                  file_types=['.pdf', '.txt'],
                                  type="file",
                                  elem_classes="short-height")
            instructor_input = gr.TextArea(
                label='Enter vector store URL, if given by instructor (WIP)',
                value='', lines=2, interactive=False,
                elem_classes="translucent")
        learning_objectives = gr.Textbox(
            label='If provided by your instructor, please input your learning objectives for this session',
            value='')

        # BUG FIX: `api_input` was referenced in the click wiring below but
        # never defined, raising NameError at startup. The key now comes from
        # the environment (see embed_key above), but create_reference_store
        # still expects an api_input component in its inputs list, so provide
        # a hidden, empty placeholder to keep that call signature intact.
        api_input = gr.Textbox(value='', visible=False)

        # Adding the button to submit all of the settings and create the
        # Chat Tutor Chain.
        with gr.Row():
            vs_build_button = gr.Button(value='Start Studying with Your Tutor!',
                                        scale=1)
            vs_build_button.click(
                disable_until_done, vs_build_button, vs_build_button
            ).then(
                create_reference_store,
                [study_tutor, vs_build_button, text_input, file_input,
                 instructor_input, api_input, learning_objectives],
                [study_tutor, vs_build_button])

    # Chatbot interface
    gr.Markdown("""
    ## Chat with the Model
    Description here
    """)
    with gr.Row(equal_height=True):
        with gr.Column(scale=2):
            chatbot = gr.Chatbot()
            with gr.Row():
                user_chat_input = gr.Textbox(label="User input", scale=9)
                user_chat_submit = gr.Button("Ask/answer model", scale=1)

    # First append the user message to the chat (unqueued, so it shows
    # immediately), then compute the tutor reply and clear the input box.
    user_chat_submit.click(
        add_user_message, [user_chat_input, study_tutor],
        [chatbot, study_tutor], queue=False
    ).then(
        get_tutor_reply, [user_chat_input, study_tutor],
        [user_chat_input, chatbot, study_tutor], queue=True)

    # Testing purpose
    test_btn = gr.Button("View your chat history")
    chat_history = gr.JSON(label="conversation history")
    test_btn.click(get_conversation_history, inputs=[study_tutor],
                   outputs=[chat_history, study_tutor])

    with gr.Blocks():
        gr.Markdown("""
        ## Export Your Chat History
        Export your chat history as a .json, .txt, or .csv file
        """)
        with gr.Row():
            export_dialogue_button_json = gr.Button("JSON")
            export_dialogue_button_txt = gr.Button("TXT")
            export_dialogue_button_csv = gr.Button("CSV")
            file_download = gr.Files(label="Download here",
                                     file_types=['.json', '.txt', '.csv'],
                                     type="file", visible=False)
        export_dialogue_button_json.click(save_json, study_tutor, file_download,
                                          show_progress=True)
        export_dialogue_button_txt.click(save_txt, study_tutor, file_download,
                                         show_progress=True)
        export_dialogue_button_csv.click(save_csv, study_tutor, file_download,
                                         show_progress=True)

demo.queue().launch(server_name='0.0.0.0', server_port=7860)