File size: 4,681 Bytes
3b538c3
 
 
86cfbfc
3b538c3
164bf10
 
3b538c3
ae32d37
3b538c3
 
 
 
 
 
 
 
 
fda45a4
3b538c3
fda45a4
3b538c3
 
 
 
 
 
 
 
 
fda45a4
3b538c3
 
 
164bf10
3b538c3
fda45a4
 
 
3b538c3
ae32d37
7864b80
3b538c3
0be388c
 
 
 
 
 
 
 
 
 
 
 
 
3b538c3
0e00a92
a7ca5b7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3b538c3
 
 
 
 
 
 
 
 
 
 
 
 
 
164bf10
 
 
 
ae32d37
 
f3685f7
1d2e1a7
f3685f7
3b538c3
 
 
 
 
 
 
00c0489
 
 
3b538c3
 
00c0489
3b538c3
 
 
00c0489
ace21f8
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
import gradio as gr
import pandas as pd
from functools import partial
from ai_classroom_suite.UIBaseComponents import *

# history is a list of lists:
# [[user_input_str, bot_response_str], ...]

class EchoingTutor:
    """Minimal stand-in tutor that echoes each user message back.

    Maintains two parallel records of the dialogue:
      - conversation_memory: list of [user_message, tutor_reply] pairs,
        the format gr.Chatbot consumes directly
      - flattened_conversation: a single plain-text transcript
    """

    def __init__(self):
        # Start with no history: empty pair list, empty transcript.
        self.conversation_memory = []
        self.flattened_conversation = ''

    def add_user_message(self, user_message):
        """Record a user turn; the tutor-reply slot is None until filled."""
        self.conversation_memory.append([user_message, None])
        self.flattened_conversation += f'\n\nUser: {user_message}'

    def get_tutor_reply(self, user_message):
        """Build the echo reply and complete the most recent turn."""
        tutor_message = f"You said: {user_message}"
        # Fill the pending [user, None] pair appended by add_user_message.
        self.conversation_memory[-1][1] = tutor_message
        self.flattened_conversation += f'\nAI: {tutor_message}'

    def forget_conversation(self):
        """Reset both history representations to their initial state."""
        self.conversation_memory = []
        self.flattened_conversation = ''


### Chatbot Functions ###
def add_user_message(user_message, chat_tutor):
    """Record the user's message on the tutor and refresh the chat display.

    Returns the updated conversation memory (fed to gr.Chatbot) together
    with the tutor object so the gr.State stays in sync.
    """
    chat_tutor.add_user_message(user_message)
    return chat_tutor.conversation_memory, chat_tutor

def get_tutor_reply(user_message, chat_tutor):
    """Generate the tutor's reply, clear the input box, and refresh the chat.

    Returns a gr.update that empties and re-enables the input textbox, plus
    the updated conversation memory and the tutor state object.
    """
    chat_tutor.get_tutor_reply(user_message)
    cleared_input = gr.update(value="", interactive=True)
    return cleared_input, chat_tutor.conversation_memory, chat_tutor

def get_conversation_history(chat_tutor):
    """Expose the tutor's raw conversation memory (for the JSON viewer)."""
    history = chat_tutor.conversation_memory
    return history, chat_tutor


# Custom CSS injected into the Blocks app below; the ids/classes here
# (#sources-container, .textbox_label, ...) are referenced via elem_classes /
# element ids on the Gradio components.
css="""
#sources-container {
  overflow: scroll !important; /* Needs to override default formatting */
  /*max-height: 20em; */ /* Arbitrary value */
}
#sources-container > div { padding-bottom: 1em !important; /* Arbitrary value */ }
.short-height > * > * { min-height: 0 !important; }
.translucent { opacity: 0.5; }
.textbox_label { padding-bottom: .5em; }
"""

with gr.Blocks(css=css) as demo:
    # Per-session tutor object held in Gradio session state; each browser
    # session gets its own EchoingTutor instance.
    study_tutor = gr.State(EchoingTutor())

    # Page title
    gr.Markdown("# Basic Interaction Tutor")

    # API key entry. embed_key is not defined in this file — presumably it
    # comes from the UIBaseComponents star import; verify there. The key is
    # saved both on Enter (submit) and when the textbox loses focus (blur).
    with gr.Box():
        gr.Markdown("### OpenAI API Key ")
        gr.HTML("""<span>Embed your OpenAI API key below; if you haven't created one already, visit
          <a href="https://platform.openai.com/account/api-keys">platform.openai.com/account/api-keys</a>
            to sign up for an account and get your personal API key</span>""",
            elem_classes="textbox_label")
        api_input = gr.Textbox(show_label=False, type="password", container=False, autofocus=True,
                      placeholder="●●●●●●●●●●●●●●●●●", value='')
        api_input.submit(fn=embed_key, inputs=[api_input, study_tutor], outputs=study_tutor)
        api_input.blur(fn=embed_key, inputs=[api_input, study_tutor], outputs=study_tutor)
        
    # Chatbot interface
    gr.Markdown("""
    ## Chat with the Model
    Description here
    """)

    with gr.Row(equal_height=True):
        with gr.Column(scale=2):
          chatbot = gr.Chatbot()
          with gr.Row():
            user_chat_input = gr.Textbox(label="User input", scale=9)
            user_chat_submit = gr.Button("Ask/answer model", scale=1)
    
    # Two-step event chain: first echo the user's message into the chat
    # history (unqueued, so it appears immediately), then generate the tutor
    # reply and clear/re-enable the input box (queued).
    user_chat_submit.click(add_user_message, 
                           [user_chat_input, study_tutor], 
                           [chatbot, study_tutor], queue=False).then(
        get_tutor_reply, [user_chat_input, study_tutor], [user_chat_input, chatbot, study_tutor], queue=True)

    # Testing purpose: dump the raw conversation memory as JSON on demand.
    test_btn = gr.Button("View your chat history")
    chat_history = gr.JSON(label = "conversation history")
    test_btn.click(get_conversation_history, inputs=[study_tutor], outputs=[chat_history, study_tutor])
    
    # Export section. save_json/save_txt/save_csv are not defined in this
    # file — presumably from the UIBaseComponents star import; verify there.
    # NOTE(review): gr.Blocks() nested inside the outer Blocks serves only
    # as a grouping context here.
    with gr.Blocks():
        gr.Markdown("""
        ## Export Your Chat History
        Export your chat history as a .json, .txt, or .csv file
        """)
        with gr.Row():
            export_dialogue_button_json = gr.Button("JSON")
            export_dialogue_button_txt = gr.Button("TXT")
            export_dialogue_button_csv = gr.Button("CSV")
        
        # Hidden file component that the save_* callbacks populate and reveal.
        file_download = gr.Files(label="Download here",
                                 file_types=['.json', '.txt', '.csv'], type="file", visible=False)
        
        export_dialogue_button_json.click(save_json, study_tutor, file_download, show_progress=True)
        export_dialogue_button_txt.click(save_txt, study_tutor, file_download, show_progress=True)
        export_dialogue_button_csv.click(save_csv, study_tutor, file_download, show_progress=True)


# Enable request queuing and serve on all interfaces, port 7860
# (typical Docker / Hugging Face Spaces configuration).
demo.queue().launch(server_name='0.0.0.0', server_port=7860)