import os

import gradio as gr
import pandas as pd
from functools import partial

from ai_classroom_suite.UIBaseComponents import *


# TODO:
class EchoingTutor(SlightlyDelusionalTutor):
    def add_user_message(self, user_message):
        self.conversation_memory.append([user_message, None])
        self.flattened_conversation = self.flattened_conversation + '\n\n' + 'User: ' + user_message

    def get_tutor_reply(self, user_message):
        # get tutor message
        tutor_message = "You said: " + user_message

        # add tutor message to conversation memory
        self.conversation_memory[-1][1] = tutor_message
        self.flattened_conversation = self.flattened_conversation + '\nAI: ' + tutor_message

    def forget_conversation(self):
        self.conversation_memory = []
        self.flattened_conversation = ''


### Chatbot Functions ###

def add_user_message(user_message, chat_tutor):
    """Display user message and update chat history to include it."""
    chat_tutor.add_user_message(user_message)
    return chat_tutor.conversation_memory, chat_tutor

def get_tutor_reply(user_message, chat_tutor):
    """Generate the tutor reply, clear the input box, and return the updated history."""
    chat_tutor.get_tutor_reply(user_message)
    return gr.update(value="", interactive=True), chat_tutor.conversation_memory, chat_tutor

def get_conversation_history(chat_tutor):
    return chat_tutor.conversation_memory, chat_tutor

def create_prompt_store(chat_tutor, vs_button, upload_files, openai_auth):
    """Build a vector store from any uploaded files and initialize the tutor chain."""
    text_segs = []
    upload_segs = []

    if upload_files:
        print(upload_files)  # debug output
        upload_fnames = [f.name for f in upload_files]
        upload_segs = get_document_segments(upload_fnames, 'file', chunk_size=700, chunk_overlap=100)

    # get the full list of everything
    all_segs = text_segs + upload_segs
    print(all_segs)  # debug output

    # create the vector store and update tutor
    vs_db, vs_retriever = create_local_vector_store(all_segs, search_kwargs={"k": 2})
    chat_tutor.vector_store = vs_db
    chat_tutor.vs_retriever = vs_retriever

    # create the tutor chain
    if not chat_tutor.api_key_valid or not chat_tutor.openai_auth:
        chat_tutor = embed_key(openai_auth, chat_tutor)
    qa_chain = create_tutor_mdl_chain(kind="retrieval_qa", mdl=chat_tutor.chat_llm,
                                      retriever=chat_tutor.vs_retriever, return_source_documents=True)
    chat_tutor.tutor_chain = qa_chain

    # return the store
    return chat_tutor, gr.update(interactive=True, value='Tutor Initialized!')


### Instructor Interface Helper Functions ###

def get_instructor_prompt(fileobj):
    """Read the instructor prompt from an uploaded file."""
    file_path = fileobj.name
    with open(file_path, "r") as f:
        instructor_prompt = f.read()
    return instructor_prompt

def embed_prompt(instructor_prompt):
    """Store the instructor prompt in an environment variable for this session."""
    os.environ["SECRET_PROMPT"] = instructor_prompt
    return os.environ.get("SECRET_PROMPT")


### User Interfaces ###

with gr.Blocks() as demo:
    # initialize tutor (with state)
    study_tutor = gr.State(EchoingTutor())

    # Student interface
    with gr.Tab("For Students"):
        # Chatbot interface
        gr.Markdown("""
        ## Chat with the Model
        Description here
        """)
        with gr.Row(equal_height=True):
            with gr.Column(scale=2):
                chatbot = gr.Chatbot()
                with gr.Row():
                    user_chat_input = gr.Textbox(label="User input", scale=9)
                    user_chat_submit = gr.Button("Ask/answer model", scale=1)

        user_chat_submit.click(add_user_message, [user_chat_input, study_tutor], [chatbot, study_tutor], queue=False).then(
            get_tutor_reply, [user_chat_input, study_tutor], [user_chat_input, chatbot, study_tutor], queue=True)

        # Testing purpose
        test_btn = gr.Button("View your chat history")
        chat_history = gr.JSON(label="conversation history")
        test_btn.click(get_conversation_history, inputs=[study_tutor], outputs=[chat_history, study_tutor])

        # Download conversation history file
        with gr.Blocks():
            gr.Markdown("""
            ## Export Your Chat History
            Export your chat history as a .json, .txt, or .csv file
            """)
            with gr.Row():
                export_dialogue_button_json = gr.Button("JSON")
                export_dialogue_button_txt = gr.Button("TXT")
                export_dialogue_button_csv = gr.Button("CSV")
                file_download = gr.Files(label="Download here", file_types=['.json', '.txt', '.csv'],
                                         type="file", visible=False)

            export_dialogue_button_json.click(save_json, study_tutor, file_download, show_progress=True)
            export_dialogue_button_txt.click(save_txt, study_tutor, file_download, show_progress=True)
            export_dialogue_button_csv.click(save_csv, study_tutor, file_download, show_progress=True)

    # Instructor interface
    with gr.Tab("Instructor Only"):
        # API Authentication functionality
        # Instead of asking students to provide a key, the key is now provided by the instructor
        with gr.Box():
            gr.Markdown("### OpenAI API Key ")
            gr.HTML("""Embed your OpenAI API key below; if you haven't created one already, visit
                    platform.openai.com/account/api-keys to sign up for an account and get your personal API key""",
                    elem_classes="textbox_label")
            api_input = gr.Textbox(show_label=False, type="password", container=False, autofocus=True,
                                   placeholder="●●●●●●●●●●●●●●●●●", value='')
            api_input.submit(fn=embed_key, inputs=[api_input, study_tutor], outputs=study_tutor)
            api_input.blur(fn=embed_key, inputs=[api_input, study_tutor], outputs=study_tutor)

            """
            Another way to permanently set the key is to go directly to Settings -> Variables and secrets -> Secrets,
            then replace the OPENAI_API_KEY value with the instructor's OpenAI key.
            """
            # api_input = os.environ.get("OPENAI_API_KEY")
            # embed_key(api_input, study_tutor)

        # Upload secret prompt functionality
        # The instructor will provide a secret prompt/persona to the tutor
        with gr.Blocks():
            # for testing purposes; change visible to False at deployment
            test_secret = gr.Textbox(label="Current secret prompt", value=os.environ.get("SECRET_PROMPT"), visible=True)

            file_input = gr.File(label="Load a .txt or .py file", file_types=['.py', '.txt'],
                                 type="file", elem_classes="short-height")

            # Verify prompt content
            instructor_prompt = gr.Textbox(label="Verify your prompt content", visible=True)
            file_input.upload(fn=get_instructor_prompt, inputs=file_input, outputs=instructor_prompt)

            # Set the secret prompt in this session and embed it into the study tutor
            prompt_submit_btn = gr.Button("Submit")
            prompt_submit_btn.click(
                fn=embed_prompt, inputs=instructor_prompt, outputs=test_secret
            ).then(
                fn=create_prompt_store,
                inputs=[study_tutor, prompt_submit_btn, file_input, api_input],
                outputs=[study_tutor, prompt_submit_btn]
            )
            # TODO: may need a way to set the secret prompt permanently in settings/secret

demo.queue().launch(server_name='0.0.0.0', server_port=7860)