# Gradio app exposing an OpenAI-backed research assistant (ResearchBuddy).

import gradio as gr
import time
import datetime
import os

# Project-local modules: the LangChain retrieval wrapper and the Google Drive
# chat-log uploader.
from lc_base.chain import openai_chain
from driveapi.drive import upload_chat_to_drive

# Module-level state shared between respond() and save_feedback() for logging.
time_diff = 0
model_name = "gpt-3.5-turbo-1106"
search_type = "stuff"
input_question = ""
model_response = ""
user_feedback = ""

# Directory holding the prebuilt FAISS index that the chain retrieves from.
index_dir = os.path.join("outputs", "combined", "policy_eu_asia_usa", "faiss_index")
# index_dir = os.path.join("outputs", "policy", "1", "faiss_index")
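
# Optional guard (a sketch, not part of the original flow): fail fast with a
# clear error if the index directory is missing, rather than deep in the chain.
if not os.path.isdir(index_dir):
    raise FileNotFoundError(f"FAISS index directory not found: {index_dir}")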

title = """<h1 align="center">ResearchBuddy</h1>"""
description = """<br><br><h3 align="center">A GPT-based research buddy to assist in navigating new research topics.</h3>"""

def save_api_key(api_key):
    # Store the key for the OpenAI calls made by the chain; avoid echoing
    # the secret back to the UI.
    os.environ["OPENAI_API_KEY"] = str(api_key)
    return "API key saved to the environment."

def user(user_message, history):
    # Currently unused helper: queue a user turn with a pending bot reply.
    return "", history + [[user_message, None]]

def respond(message, chat_history):
    global time_diff, model_response, input_question

    question = str(message)
    chain = openai_chain(inp_dir=index_dir)

    start_time = time.time()
    output = chain.get_response(query=question, k=10, model_name=model_name, type=search_type)
    print(output)  # server-side log of the raw chain output

    # Update the module-level state consumed by save_feedback().
    time_diff = time.time() - start_time
    model_response = output
    input_question = question

    chat_history.append((message, output))

    time.sleep(2)
    return "", chat_history

def save_feedback(feedback):
    global user_feedback
    user_feedback = feedback

    # Note: the name omits the year and seconds, so logs written in the same
    # day/month/hour/minute overwrite one another.
    curr_date = datetime.datetime.now()
    file_name = f"chat_{curr_date.day}_{curr_date.month}_{curr_date.hour}_{curr_date.minute}.csv"
    log_data = [
        ["Question", "Response", "Model", "Time", "Feedback"],
        [input_question, model_response, model_name, time_diff, user_feedback],
    ]

    # Upload only on an explicit Yes/No; the neutral reset value is ignored.
    if feedback in ("Yes", "No"):
        upload_chat_to_drive(log_data, file_name)

def default_feedback():
    # Reset the feedback radio to the neutral "thinking" option.
    return "🤔"

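# UI layout: title, API-key box, chat window, message box, example prompts,
# and a feedback radio whose Yes/No answers trigger the Drive upload above.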
with gr.Blocks(theme=gr.themes.Soft(primary_hue="emerald", neutral_hue="slate")) as chat:
    gr.HTML(title)
                 
    api_key_input = gr.Textbox(lines=1, label="Enter your OpenAI API Key")
    api_key_input_submit = api_key_input.submit(save_api_key, [api_key_input])

    chatbot = gr.Chatbot(height=750)
    msg = gr.Textbox(label="Send a message", placeholder="Send a message",
                             show_label=False, container=False)  

    with gr.Row():
        with gr.Column():
            gr.Examples([
                ["Explain these documents to me in simpler terms."],
                ["What do these documents talk about?"],
            ], inputs=msg, label="Click any example to copy it into the chatbox")

        with gr.Column():
            feedback_radio = gr.Radio(
                choices=["Yes", "No", "🤔"],
                value="🤔",  # Radio takes a single default value, not a list
                label="Did you like the latest response?",
                info="Selecting Yes/No will send the following diagnostic data: question, response, and time taken.",
            )

    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    # Reset the feedback radio to neutral after each new message.
    msg.submit(default_feedback, outputs=[feedback_radio])

    feedback_radio.change(
        fn=save_feedback,
        inputs=[feedback_radio],
    )

    gr.HTML(description)


chat.queue()
chat.launch()
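
# To run locally (assuming this file is saved as app.py and gradio plus the
# local lc_base/driveapi packages are installed):
#   python app.py
# Gradio serves on http://127.0.0.1:7860 by default.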