File size: 2,308 Bytes
3369d9f
 
 
 
2927735
3369d9f
2927735
 
 
 
 
 
 
 
 
 
3369d9f
cdc9be2
192dc63
3369d9f
2927735
 
3369d9f
 
 
 
 
 
 
 
 
2927735
 
3369d9f
 
2927735
192dc63
2927735
 
192dc63
2927735
 
 
 
 
 
3369d9f
 
2927735
3369d9f
 
 
2927735
3369d9f
 
 
 
 
 
4fa6af3
3369d9f
2927735
 
 
 
 
 
 
 
 
 
 
3369d9f
 
 
 
 
 
 
 
4fa6af3
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
# Application file for Gradio App for OpenAI Model

import gradio as gr
import time
import datetime
import os

from lc_base.chain import openai_chain

# Module-level mutable state. These are written by respond() on every request
# so the latest interaction can be logged/inspected elsewhere.
# global time_diff, model_name, search_type
time_diff = 0  # seconds taken by the last chain.get_response() call; set in respond()
model_name="gpt-3.5-turbo-1106"  # OpenAI chat model passed through to the chain
search_type = "stuff"  # chain/search type forwarded to get_response(type=...)
input_question = ""  # last user question; set in respond()
model_response = ""  # last model output; set in respond()
user_feedback = ""  # NOTE(review): never read or written in this file — presumably used by other modules; verify

# Path to the prebuilt FAISS index loaded by openai_chain in respond().
# NOTE(review): `dir` shadows the builtin dir(); renaming would require
# touching respond() as well, so left as-is here.
dir = os.path.join("outputs", "combined", "policy_eu_asia_usa", "faiss_index")
# dir = os.path.join("outputs", "policy", "1", "faiss_index")

# Static HTML fragments rendered at the top and bottom of the Blocks UI.
title = """<h1 align="center">ResearchBuddy</h1>"""
description = """<br><br><h3 align="center">This is a GPT based Research Buddy to assist in navigating new research topics.</h3>"""

def save_api_key(api_key):
    """Store the user-supplied OpenAI API key in the process environment.

    Args:
        api_key: Key pasted into the textbox; coerced to ``str``.

    Returns:
        A confirmation message. The key is masked (first/last 4 chars only)
        so the secret is never echoed back into the UI or logs.
    """
    key = str(api_key)
    os.environ['OPENAI_API_KEY'] = key
    # Never reflect the full secret back to the caller.
    masked = f"{key[:4]}...{key[-4:]}" if len(key) > 8 else "***"
    return f"API Key saved in the environment: {masked}"

def user(user_message, history):
    """Queue a user turn and clear the input textbox.

    Returns an empty string (which clears the textbox) together with a new
    history list extended by a ``[message, None]`` pair — ``None`` is the
    slot the bot reply fills later.
    """
    updated_history = list(history)
    updated_history.append([user_message, None])
    return "", updated_history

def respond(message, chat_history):
    """Answer *message* via the retrieval chain and extend the chat history.

    Args:
        message: Raw user question from the textbox.
        chat_history: Gradio chat history; a ``(user, bot)`` pair is appended
            in place.

    Returns:
        ``("", chat_history)`` — the empty string clears the input textbox
        (the original returned ``" "``, which left a stray space in the box).

    Side effects:
        Updates the module-level ``time_diff``, ``model_response`` and
        ``input_question`` used for logging, and prints the raw output.
    """
    global time_diff, model_response, input_question

    question = str(message)
    # NOTE(review): a new chain (and FAISS index load) is built per request;
    # consider constructing it once at module scope if startup cost matters.
    chain = openai_chain(inp_dir=dir)

    start_time = time.time()
    output = chain.get_response(query=question, k=10, model_name=model_name, type=search_type)
    print(output)

    # Record the last interaction in module-level state for logging.
    time_diff = time.time() - start_time
    model_response = output
    input_question = question

    chat_history.append((question, output))

    # The original slept 2s here for no functional reason; removed.
    return "", chat_history


# --- UI definition: inside gr.Blocks, component creation order defines the
# --- on-screen layout order, so these statements must not be reordered.
with gr.Blocks(theme=gr.themes.Soft(primary_hue="emerald", neutral_hue="slate")) as chat:
    gr.HTML(title)
                 
    # API-key entry: pressing Enter stores the key via save_api_key().
    # NOTE(review): no `outputs` is passed to .submit(), so save_api_key's
    # return message is discarded — confirm that is intentional.
    api_key_input = gr.Textbox(lines=1, label="Enter your OpenAI API Key")
    api_key_input_submit = api_key_input.submit(save_api_key, [api_key_input])

    # Main chat area: history display plus the message entry box.
    chatbot = gr.Chatbot(height=750)
    msg = gr.Textbox(label="Send a message", placeholder="Send a message",
                             show_label=False, container=False)  

    with gr.Row():
        with gr.Column():
            # Clicking an example copies its text into the message textbox.
            gr.Examples([
                ["Explain these documents to me in simpler terms."],
                ["What does these documents talk about?"],

            ], inputs=msg, label= "Click on any example to copy in the chatbox"
            )


    # Pressing Enter in the message box runs respond(); it clears the box
    # and refreshes the chatbot history.
    msg.submit(respond, [msg, chatbot], [msg, chatbot])


    gr.HTML(description)


# queue() enables request queuing so concurrent users are serialized.
chat.queue()
chat.launch()