import os
import gradio as gr
import pandas as pd
from functools import partial
from ai_classroom_suite.UIBaseComponents import *

# history is a list of lists
# [[user_input_str, bot_response_str], ...]
class EchoingTutor(SlightlyDelusionalTutor):
    def add_user_message(self, user_message):
        self.conversation_memory.append([user_message, None])
        self.flattened_conversation = self.flattened_conversation + '\n\n' + 'User: ' + user_message

    def get_tutor_reply(self, user_message):
        # get tutor message
        tutor_message = "You said: " + user_message
        # add tutor message to conversation memory
        self.conversation_memory[-1][1] = tutor_message
        self.flattened_conversation = self.flattened_conversation + '\nAI: ' + tutor_message

    def forget_conversation(self):
        self.conversation_memory = []
        self.flattened_conversation = ''


### Chatbot Functions ###

def add_user_message(user_message, chat_tutor):
    """Display user message and update chat history to include it."""
    chat_tutor.add_user_message(user_message)
    return chat_tutor.conversation_memory, chat_tutor

def get_tutor_reply(user_message, chat_tutor):
    chat_tutor.get_tutor_reply(user_message)
    return gr.update(value="", interactive=True), chat_tutor.conversation_memory, chat_tutor

def get_conversation_history(chat_tutor):
    return chat_tutor.conversation_memory, chat_tutor

def get_instructor_prompt(fileobj):
    # read the uploaded file from its temporary path and close it promptly
    file_path = fileobj.name
    with open(file_path, "r") as f:
        instructor_prompt = f.read()
    return str(instructor_prompt)

def embed_prompt(chat_tutor, instructor_prompt):
    # instructor_prompt arrives as the textbox's string value, not a component
    os.environ["SECRET_PROMPT"] = instructor_prompt
    return os.environ.get("SECRET_PROMPT")


with gr.Blocks() as demo:
    # initialize tutor (with state)
    study_tutor = gr.State(EchoingTutor())

    # Instead of asking students to provide a key, the key is now directly provided by the instructor
    api_input = os.environ.get("OPENAI_API_KEY")
    embed_key(api_input, study_tutor)

    with gr.Tab("For Students"):
        # Chatbot interface
        gr.Markdown("""
        ## Chat with the Model
        Description here
        """)
        with gr.Row(equal_height=True):
            with gr.Column(scale=2):
                chatbot = gr.Chatbot()
                with gr.Row():
                    user_chat_input = gr.Textbox(label="User input", scale=9)
                    user_chat_submit = gr.Button("Ask/answer model", scale=1)

        user_chat_submit.click(
            add_user_message, [user_chat_input, study_tutor], [chatbot, study_tutor], queue=False
        ).then(
            get_tutor_reply, [user_chat_input, study_tutor], [user_chat_input, chatbot, study_tutor], queue=True
        )

        # Testing purpose
        test_btn = gr.Button("View your chat history")
        chat_history = gr.JSON(label="conversation history")
        test_btn.click(get_conversation_history, inputs=[study_tutor], outputs=[chat_history, study_tutor])

        with gr.Blocks():
            gr.Markdown("""
            ## Export Your Chat History
            Export your chat history as a .json, .txt, or .csv file
            """)
            with gr.Row():
                export_dialogue_button_json = gr.Button("JSON")
                export_dialogue_button_txt = gr.Button("TXT")
                export_dialogue_button_csv = gr.Button("CSV")
                file_download = gr.Files(label="Download here",
                                         file_types=['.json', '.txt', '.csv'],
                                         type="file", visible=False)
            export_dialogue_button_json.click(save_json, study_tutor, file_download, show_progress=True)
            export_dialogue_button_txt.click(save_txt, study_tutor, file_download, show_progress=True)
            export_dialogue_button_csv.click(save_csv, study_tutor, file_download, show_progress=True)

    with gr.Tab("Instructor Only"):
        with gr.Blocks():
            file_input = gr.File(label="Load a .txt or .py file",
                                 file_types=['.py', '.txt'],
                                 type="file", elem_classes="short-height")
            instructor_prompt = gr.Textbox(visible=True)
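            # Example flow (illustrative; "prompt.txt" is a hypothetical file name):
            # uploading prompt.txt containing "Act as a tutor." passes its temp-file
            # object to get_instructor_prompt, whose returned text then fills the
            # instructor_prompt textbox via the upload event below.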
            # use the upload event (gr.File has no submit event) so the textbox fills when a file is loaded
            file_input.upload(get_instructor_prompt, file_input, instructor_prompt)
            #gr.Interface(get_instructor_prompt, file_input, instructor_prompt)
            #test = gr.Textbox(label="testing")
            #instructor_prompt.change(embed_prompt, instructor_prompt, test)

# enable the request queue and serve on all interfaces, port 7860
demo.queue().launch(server_name='0.0.0.0', server_port=7860)