import os
import gradio as gr
import pandas as pd
from functools import partial
from ai_classroom_suite.UIBaseComponents import *
# history is a list of lists:
# [[user_input_str, bot_response_str], ...]
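# e.g. [["What is recursion?", "You said: What is recursion?"]] once the tutor has replied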
class EchoingTutor(SlightlyDelusionalTutor):
    def add_user_message(self, user_message):
        self.conversation_memory.append([user_message, None])
        self.flattened_conversation = self.flattened_conversation + '\n\n' + 'User: ' + user_message

    def get_tutor_reply(self, user_message):
        # get tutor message
        tutor_message = "You said: " + user_message
        # add tutor message to conversation memory
        self.conversation_memory[-1][1] = tutor_message
        self.flattened_conversation = self.flattened_conversation + '\nAI: ' + tutor_message

    def forget_conversation(self):
        self.conversation_memory = []
        self.flattened_conversation = ''
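
# Minimal usage sketch of EchoingTutor outside of Gradio (illustrative only; it
# assumes the parent class initializes conversation_memory and flattened_conversation,
# and the input string is an arbitrary example):
#   tutor = EchoingTutor()
#   tutor.add_user_message("What is a loop?")
#   tutor.get_tutor_reply("What is a loop?")
#   tutor.conversation_memory  # -> [["What is a loop?", "You said: What is a loop?"]]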
### Chatbot Functions ###

def add_user_message(user_message, chat_tutor):
    """Display the user message and update the chat history to include it."""
    chat_tutor.add_user_message(user_message)
    return chat_tutor.conversation_memory, chat_tutor

def get_tutor_reply(user_message, chat_tutor):
    """Generate the tutor reply, clear the input box, and refresh the chat display."""
    chat_tutor.get_tutor_reply(user_message)
    return gr.update(value="", interactive=True), chat_tutor.conversation_memory, chat_tutor

def get_conversation_history(chat_tutor):
    """Return the raw conversation memory for display."""
    return chat_tutor.conversation_memory, chat_tutor

def get_instructor_prompt(fileobj):
    """Read the uploaded file and return its contents as the instructor prompt."""
    file_path = fileobj.name
    with open(file_path, "r") as f:
        instructor_prompt = f.read()
    return str(instructor_prompt)

def embed_prompt(chat_tutor, instructor_prompt):
    """Store the instructor prompt in an environment variable for later use."""
    # Gradio passes the Textbox value in as a plain string, so no .value access is needed
    os.environ["SECRET_PROMPT"] = instructor_prompt
    return os.environ.get("SECRET_PROMPT")

with gr.Blocks() as demo:
    # Initialize the tutor (held in per-session state)
    study_tutor = gr.State(EchoingTutor())

    # Instead of asking students to provide a key, the key is now provided directly by the instructor
    api_input = os.environ.get("OPENAI_API_KEY")
    embed_key(api_input, study_tutor)
with gr.Tab("For Students"):
# Chatbot interface
gr.Markdown("""
## Chat with the Model
Description here
""")
with gr.Row(equal_height=True):
with gr.Column(scale=2):
chatbot = gr.Chatbot()
with gr.Row():
user_chat_input = gr.Textbox(label="User input", scale=9)
user_chat_submit = gr.Button("Ask/answer model", scale=1)
user_chat_submit.click(add_user_message,
[user_chat_input, study_tutor],
[chatbot, study_tutor], queue=False).then(
get_tutor_reply, [user_chat_input, study_tutor], [user_chat_input, chatbot, study_tutor], queue=True)
        # For testing purposes: view the raw conversation history
        test_btn = gr.Button("View your chat history")
        chat_history = gr.JSON(label="conversation history")
        test_btn.click(get_conversation_history, inputs=[study_tutor], outputs=[chat_history, study_tutor])
        with gr.Blocks():
            gr.Markdown("""
            ## Export Your Chat History
            Export your chat history as a .json, .txt, or .csv file
            """)
            with gr.Row():
                export_dialogue_button_json = gr.Button("JSON")
                export_dialogue_button_txt = gr.Button("TXT")
                export_dialogue_button_csv = gr.Button("CSV")
            file_download = gr.Files(label="Download here",
                                     file_types=['.json', '.txt', '.csv'],
                                     type="file", visible=False)
            export_dialogue_button_json.click(save_json, study_tutor, file_download, show_progress=True)
            export_dialogue_button_txt.click(save_txt, study_tutor, file_download, show_progress=True)
            export_dialogue_button_csv.click(save_csv, study_tutor, file_download, show_progress=True)
with gr.Tab("Instructor Only"):
with gr.Blocks():
file_input = gr.File(label="Load a .txt or .py file",
file_types=['.py', '.txt'], type="file",
elem_classes="short-height")
instructor_prompt = gr.Textbox(visible=True)
file_input.submit(get_instructor_prompt, file_input, instructor_prompt)
#gr.Interface(get_instructor_prompt, file_input, instructor_prompt)
#test = gr.Textbox(label="testing")
#instructor_prompt.change(embed_prompt, instructor_prompt, test)
demo.queue().launch(server_name='0.0.0.0', server_port=7860) |
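
# To run locally (assuming gradio and the ai_classroom_suite package are installed,
# and that this file is saved as, e.g., app.py):
#   python app.py
# then open http://localhost:7860 in a browser.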