import gradio as gr
import pandas as pd
from functools import partial

def save_chatbot_dialogue(chat_tutor, save_type):
    """Write the tutor's conversation memory to disk as csv, json, or txt."""
    formatted_convo = pd.DataFrame(chat_tutor.conversation_memory, columns=['user', 'chatbot'])

    output_fname = f'tutoring_conversation.{save_type}'

    if save_type == 'csv':
        formatted_convo.to_csv(output_fname, index=False)
    elif save_type == 'json':
        formatted_convo.to_json(output_fname, orient='records')
    elif save_type == 'txt':
        temp = formatted_convo.apply(lambda x: 'User: {0}\nAI: {1}'.format(x['user'], x['chatbot']), axis=1)
        temp = '\n\n'.join(temp.tolist())
        with open(output_fname, 'w') as f:
            f.write(temp)
    else:
        # Unrecognized format: keep the download component hidden.
        return gr.update(value=None, visible=False)

    return gr.update(value=output_fname, visible=True)

save_json = partial(save_chatbot_dialogue, save_type='json')
save_txt = partial(save_chatbot_dialogue, save_type='txt')
save_csv = partial(save_chatbot_dialogue, save_type='csv')
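# Each partial pre-binds save_type, so the click handlers below can call these
# with only the tutor state. Illustrative sketch (not executed here):
#   save_json(chat_tutor)  # writes 'tutoring_conversation.json' and reveals it for download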


# Chatbot history (and conversation_memory) is a list of lists:
# [[user_input_str, bot_response_str], ...]
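# e.g. after two turns: [["Hi", "You said: Hi"], ["What is 2+2?", "You said: What is 2+2?"]]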

class EchoingTutor:
    """Minimal tutor that replies by echoing the user's message and keeps the conversation in memory."""

    def __init__(self):
        self.conversation_memory = []
        self.flattened_conversation = ''

    def add_user_message(self, user_message):
        self.conversation_memory.append([user_message, None])
        self.flattened_conversation = self.flattened_conversation + '\n\n' + 'User: ' + user_message

    def get_tutor_reply(self, user_message):
        # get tutor message
        tutor_message = "You said: " + user_message
        # add tutor message to conversation memory
        self.conversation_memory[-1][1] = tutor_message
        self.flattened_conversation = self.flattened_conversation + '\nAI: ' + tutor_message

    def forget_conversation(self):
        self.conversation_memory = []
        self.flattened_conversation = ''

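# Quick sketch of how EchoingTutor behaves on its own (illustrative only, not
# part of the Gradio app flow; assumes nothing beyond the class above):
#
#   tutor = EchoingTutor()
#   tutor.add_user_message("Hello")   # memory: [["Hello", None]]
#   tutor.get_tutor_reply("Hello")    # memory: [["Hello", "You said: Hello"]]
#   tutor.forget_conversation()       # memory and flattened log cleared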

### Chatbot Functions ###
def add_user_message(user_message, chat_tutor):
    """Display the user message and update the chat history to include it."""
    chat_tutor.add_user_message(user_message)
    return chat_tutor.conversation_memory, chat_tutor

def get_tutor_reply(user_message, chat_tutor):
    """Generate the tutor's reply, then clear and re-enable the input box."""
    chat_tutor.get_tutor_reply(user_message)
    return gr.update(value="", interactive=True), chat_tutor.conversation_memory, chat_tutor

def get_conversation_history(chat_tutor):
    return chat_tutor.conversation_memory, chat_tutor
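
# Note: each wrapper returns values in the same order as the `outputs` list it is
# wired to below, passing the tutor back so the gr.State value stays up to date.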

with gr.Blocks() as demo:
    # Initialize the tutor and store it as per-session state
    study_tutor = gr.State(EchoingTutor())
        
    # Chatbot interface
    gr.Markdown("""
    ## Chat with the Model
    Type a message below and submit it; this demo tutor simply echoes your input back.
    """)

    with gr.Row(equal_height=True):
        with gr.Column(scale=2):
            chatbot = gr.Chatbot()
            with gr.Row():
                user_chat_input = gr.Textbox(label="User input", scale=9)
                user_chat_submit = gr.Button("Ask/answer model", scale=1)
    
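    # Two-step event chain: first add and display the user's turn, then fill in
    # the echoed tutor reply and clear/re-enable the input box.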
    user_chat_submit.click(add_user_message, 
                           [user_chat_input, study_tutor], 
                           [chatbot, study_tutor], queue=False).then(
        get_tutor_reply, [user_chat_input, study_tutor], [user_chat_input, chatbot, study_tutor], queue=True)

    # For testing: inspect the raw conversation history as JSON
    test_btn = gr.Button("View your chat history")
    chat_history = gr.JSON(label="Conversation history")
    test_btn.click(get_conversation_history, inputs=[study_tutor], outputs=[chat_history, study_tutor])
    
    with gr.Group():
        gr.Markdown("""
        ## Export Your Chat History
        Export your chat history as a .json, .txt, or .csv file
        """)
        with gr.Row():
            export_dialogue_button_json = gr.Button("JSON")
            export_dialogue_button_txt = gr.Button("TXT")
            export_dialogue_button_csv = gr.Button("CSV")
        
        file_download = gr.Files(label="Download here",
                                 file_types=['.json', '.txt', '.csv'], type="file", visible=False)
        
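        # Each export button writes the conversation to disk in its format and
        # reveals the hidden download component pointing at that file.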
        export_dialogue_button_json.click(save_json, study_tutor, file_download, show_progress=True)
        export_dialogue_button_txt.click(save_txt, study_tutor, file_download, show_progress=True)
        export_dialogue_button_csv.click(save_csv, study_tutor, file_download, show_progress=True)
    
demo.queue()
demo.launch()
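
# To run locally (assuming this script is saved as, e.g., app.py):
#   python app.py
# then open the local URL that Gradio prints in a browser.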