import gradio as gr
import pandas as pd
from functools import partial
from ai_classroom_suite.UIBaseComponents import *

# history is a list of lists:
# [[user_input_str, bot_response_str], ...]
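# (the same [user, bot] pair format that the gr.Chatbot component expects here)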

class EchoingTutor:
    """Minimal tutor that simply echoes the user's message back."""
    def __init__(self):
        # structured memory: list of [user_message, tutor_reply] pairs
        self.conversation_memory = []
        # plain-text transcript of the whole conversation
        self.flattened_conversation = ''

    def add_user_message(self, user_message):
        self.conversation_memory.append([user_message, None])
        self.flattened_conversation = self.flattened_conversation + '\n\n' + 'User: ' + user_message

    def get_tutor_reply(self, user_message):
        # get tutor message
        tutor_message = "You said: " + user_message
        # add tutor message to conversation memory
        self.conversation_memory[-1][1] = tutor_message
        self.flattened_conversation = self.flattened_conversation + '\nAI: ' + tutor_message

    def forget_conversation(self):
        self.conversation_memory = []
        self.flattened_conversation = ''
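
# Illustrative use of EchoingTutor outside the UI (not executed here):
#   tutor = EchoingTutor()
#   tutor.add_user_message("hi")     # stores ["hi", None]
#   tutor.get_tutor_reply("hi")      # fills in the reply: "You said: hi"
#   tutor.forget_conversation()      # clears both memories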


### Chatbot Functions ###
def add_user_message(user_message, chat_tutor):
    """Record the user message and return the updated chat history for display."""
    chat_tutor.add_user_message(user_message)
    return chat_tutor.conversation_memory, chat_tutor

def get_tutor_reply(user_message, chat_tutor):
    """Generate the tutor reply, clear the input box, and refresh the chat display."""
    chat_tutor.get_tutor_reply(user_message)
    return gr.update(value="", interactive=True), chat_tutor.conversation_memory, chat_tutor

def get_conversation_history(chat_tutor):
    """Return the raw conversation memory for the JSON viewer."""
    return chat_tutor.conversation_memory, chat_tutor

with gr.Blocks() as demo:
    # initialize the tutor, wrapped in gr.State so each user session gets its own copy
    study_tutor = gr.State(EchoingTutor())

    # Chatbot interface
    gr.Markdown("""
    ## Chat with the Model
    Description here
    """)

    with gr.Row(equal_height=True):
        with gr.Column(scale=2):
            chatbot = gr.Chatbot()
            with gr.Row():
                user_chat_input = gr.Textbox(label="User input", scale=9)
                user_chat_submit = gr.Button("Ask/answer model", scale=1)
    
    # Chain the handlers: first record the user message so it appears immediately,
    # then generate the tutor reply and clear/re-enable the input box.
    user_chat_submit.click(add_user_message,
                           [user_chat_input, study_tutor],
                           [chatbot, study_tutor], queue=False).then(
        get_tutor_reply, [user_chat_input, study_tutor], [user_chat_input, chatbot, study_tutor], queue=True)

    # For testing: show the raw conversation memory as JSON
    test_btn = gr.Button("View your chat history")
    chat_history = gr.JSON(label="conversation history")
    test_btn.click(get_conversation_history, inputs=[study_tutor], outputs=[chat_history, study_tutor])
    
    with gr.Blocks():
        gr.Markdown("""
        ## Export Your Chat History
        Export your chat history as a .json, .txt, or .csv file
        """)
        with gr.Row():
            export_dialogue_button_json = gr.Button("JSON")
            export_dialogue_button_txt = gr.Button("TXT")
            export_dialogue_button_csv = gr.Button("CSV")
        
        file_download = gr.Files(label="Download here",
                                 file_types=['.json', '.txt', '.csv'], type="file", visible=False)
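        # save_json, save_txt, and save_csv below are assumed to come from the
        # wildcard import of ai_classroom_suite.UIBaseComponents at the top of this
        # file; presumably each writes the tutor's history to a file and returns it
        # for the gr.Files component.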
        
        export_dialogue_button_json.click(save_json, study_tutor, file_download, show_progress=True)
        export_dialogue_button_txt.click(save_txt, study_tutor, file_download, show_progress=True)
        export_dialogue_button_csv.click(save_csv, study_tutor, file_download, show_progress=True)
    
demo.queue()
demo.launch()