import openai
import random
import time
import gradio as gr
import os
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import DeepLake
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.llms import OpenAI
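
# Gradio app with two tabs: one stores an OpenAI API key in the process
# environment, the other is a coding assistant that answers questions with a
# ConversationalRetrievalChain over an existing DeepLake vector store
# at ./documentation_db.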

def set_api_key(key):
    os.environ["OPENAI_API_KEY"] = key
    return f"Your API Key has been set to {key}"

def reset_api_key():
    # Clear the key from the environment and reset the status textbox.
    os.environ["OPENAI_API_KEY"] = ""
    return gr.update(value="", placeholder="Your API Key has been reset")
    
def get_api_key():
    api_key = os.getenv("OPENAI_API_KEY")
    return api_key

def respond(message, chat_history):
    
    # Get embeddings
    embeddings = OpenAIEmbeddings()
    
    # Connect to the existing DeepLake vector store (read-only)
    db = DeepLake(dataset_path="./documentation_db", embedding_function=embeddings, read_only=True)
    # Configure the retriever: cosine distance, MMR re-ranking, top-10 results
    retriever = db.as_retriever(search_kwargs={"distance_metric":'cos',
                                               "fetch_k":10,
                                               "maximal_marginal_relevance":True,
                                               "k":10})

    # Create the chat model and the retrieval chain; a separate deterministic
    # OpenAI LLM is used to condense the follow-up question.
    model = ChatOpenAI(model='gpt-3.5-turbo')
    qa = ConversationalRetrievalChain.from_llm(model, retriever,
                                               condense_question_llm=OpenAI(temperature=0))

    # The chain expects chat history as (user, assistant) tuples, while Gradio's
    # Chatbot component supplies a list of lists, so convert before calling it.
    history_tuples = [tuple(turn) for turn in chat_history]
    result = qa({"question": message, "chat_history": history_tuples})
    chat_history.append((message, result['answer']))
    time.sleep(1)
    return "", chat_history
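
# respond() assumes a DeepLake dataset already exists at ./documentation_db.
# The helper below is a minimal ingestion sketch and is never called by the app;
# the source path and chunking parameters are assumptions, not part of this project.
def build_documentation_db(source_path="docs/documentation.txt"):  # hypothetical path
    from langchain.document_loaders import TextLoader
    from langchain.text_splitter import CharacterTextSplitter

    docs = TextLoader(source_path).load()
    chunks = CharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_documents(docs)
    return DeepLake.from_documents(chunks, OpenAIEmbeddings(), dataset_path="./documentation_db")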

with gr.Blocks() as demo:
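    # Tab 1: API key management. Tab 2: retrieval-augmented coding assistant.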
    with gr.Tab("OpenAI API Key Submission"):
        api_input = gr.Textbox(label = "API Key - Your key is kept only for the current session. Use the button below to clear it.",
                               placeholder = "Please provide your OpenAI API key here.")
        api_key_status = gr.Textbox(label = "API Key Status",
                                    placeholder = "Your API Key has been reset or has not been set yet. Please enter your key.",
                                    interactive = False)
        api_submit_button = gr.Button("Submit")
        api_reset_button = gr.Button("Clear API Key from session")
    
    with gr.Tab("Coding Assistant"):
        api_check_button = gr.Button("Get API Key")
        api_print = gr.Textbox(label = "OpenAI API Key - Please ensure the API Key is set correctly")
        chatbot = gr.Chatbot(label="ChatGPT Powered Coding Assistant")
        msg = gr.Textbox(label="User Prompt", placeholder="Your Query Here")
        clear = gr.Button("Clear")
        
    api_submit_button.click(set_api_key, inputs=api_input, outputs=api_key_status)
    api_reset_button.click(reset_api_key, outputs=api_key_status)
    api_check_button.click(get_api_key, outputs=api_print)
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    clear.click(lambda: None, None, chatbot, queue=False)
    
demo.launch()