import datetime
import os
from typing import List, Optional, Tuple

import gradio as gr
from langchain.chains import ConversationChain
from langchain.llms import OpenAI
from threading import Lock

from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores.faiss import FAISS
from langchain.docstore.document import Document
from langchain.chains.question_answering import load_qa_chain



def load_chain():
    """Logic for loading the chain you want to use should go here."""
    llm = OpenAI(temperature=0)
    chain = ConversationChain(llm=llm)
    return chain
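
# A minimal variant of load_chain with explicit conversation memory: a sketch, not part
# of the original file (the stock ConversationChain already keeps a buffer internally,
# and the import path for ConversationBufferMemory depends on the LangChain version):
#
#   from langchain.memory import ConversationBufferMemory
#
#   def load_chain():
#       llm = OpenAI(temperature=0)
#       return ConversationChain(llm=llm, memory=ConversationBufferMemory())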


def set_openai_api_key(api_key):
    """Set the API key and return the chain plus the question-answering helpers.

    If no valid api_key is given, None values are returned.
    """
    if api_key and api_key.startswith("sk-") and len(api_key) > 50:
        os.environ["OPENAI_API_KEY"] = api_key
        print("\n\n ++++++++++++++ Setting OpenAI API key ++++++++++++++ \n\n")
        print(str(datetime.datetime.now()) + ": Before load_chain, OPENAI_API_KEY length: " + str(
            len(os.environ["OPENAI_API_KEY"])))
        chain = load_chain()
        print(str(datetime.datetime.now()) + ": After load_chain, OPENAI_API_KEY length: " + str(
            len(os.environ["OPENAI_API_KEY"])))

        # Pertains to question answering functionality
        embeddings = OpenAIEmbeddings()
        qa_chain = load_qa_chain(OpenAI(temperature=0), chain_type="stuff")

        # Clear the key from the environment once the chains hold their own copy
        os.environ["OPENAI_API_KEY"] = ""
        return chain, embeddings, qa_chain
    return None, None, None

class ChatWrapper:

    def __init__(self):
        self.lock = Lock()

    def __call__(
        self, api_key: str, inp: str, history: Optional[List[Tuple[str, str]]],
        chain: Optional[ConversationChain], qa_chain, docsearch, use_embeddings: bool,
    ):
        """Execute the chat functionality."""
        self.lock.acquire()
        try:
            history = history or []
            # If chain is None, that is because no API key was provided.
            if chain is None:
                history.append((inp, "Please paste your OpenAI key to use"))
                return history, history
            # Set OpenAI key
            import openai
            openai.api_key = api_key
            if use_embeddings:
                if inp and inp.strip() != "":
                    if docsearch:
                        # Answer over the user-supplied text: retrieve similar chunks,
                        # then run the question-answering chain over them.
                        docs = docsearch.similarity_search(inp)
                        output = str(qa_chain.run(input_documents=docs, question=inp))
                    else:
                        output = "Please supply some text in the Embeddings tab."
                else:
                    output = "What's on your mind?"
            else:
                # Run the conversation chain on the raw input.
                output = chain.run(input=inp)
            history.append((inp, output))
        except Exception as e:
            raise e
        finally:
            self.lock.release()
        return history, history
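
# Example of calling the wrapper directly, outside Gradio (hypothetical values; requires
# a real API key and the chain/qa_chain pair returned by set_openai_api_key):
#
#   chain, embeddings, qa_chain = set_openai_api_key(api_key)
#   history, _ = ChatWrapper()(api_key, "What's 2 + 2?", [], chain, qa_chain, None, False)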

# Pertains to question answering functionality
def update_embeddings(embeddings_text, embeddings, qa_chain):
    if embeddings_text and embeddings:
        text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
        texts = text_splitter.split_text(embeddings_text)

        docsearch = FAISS.from_texts(texts, embeddings)
        print("Embeddings updated")
        return docsearch
    return None
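
# Example (hypothetical, outside the UI): build an index from pasted text and query it,
# assuming OpenAIEmbeddings can reach the API with a valid key in the environment:
#
#   docsearch = update_embeddings("LangChain is a framework for LLM apps.", OpenAIEmbeddings(), None)
#   docs = docsearch.similarity_search("What is LangChain?")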


# Pertains to question answering functionality
def update_use_embeddings(widget, state):
    """Mirror the "Use embeddings" checkbox value into session state."""
    state = widget
    return state

chat = ChatWrapper()

block = gr.Blocks(css=".gradio-container {background-color: lightgray}")

with block:
    # Session state shared by the event handlers below
    state = gr.State()                  # chat history
    agent_state = gr.State()            # conversation chain
    embeddings_state = gr.State()       # OpenAIEmbeddings instance
    qa_chain_state = gr.State()         # question-answering chain
    docsearch_state = gr.State()        # FAISS index built from the Embeddings tab
    use_embeddings_state = gr.State(False)

    with gr.Tab("Chat"):
        with gr.Row():
            gr.Markdown("<h3><center>LangChain Demo</center></h3>")

            openai_api_key_textbox = gr.Textbox(
                placeholder="Paste your OpenAI API key (sk-...)",
                show_label=False,
                lines=1,
                type="password",
            )

        chatbot = gr.Chatbot()

        with gr.Row():
            message = gr.Textbox(
                label="Treat it like ChatGPT",
                placeholder="Create a year-six biology question",
                lines=1,
            )
            submit = gr.Button(value="Send", variant="secondary").style(full_width=False)

        gr.Examples(
            examples=[
                "Who is the PM of Malaysia?",
                "Create a multiple-choice question about a chair",
                "What's 2 + 2?",
            ],
            inputs=message,
        )

    with gr.Tab("Embeddings"):
        embeddings_text_box = gr.Textbox(
            label="Enter text for embeddings and hit Create:",
            lines=20,
        )

        with gr.Row():
            use_embeddings_cb = gr.Checkbox(label="Use embeddings", value=False)
            use_embeddings_cb.change(
                update_use_embeddings,
                inputs=[use_embeddings_cb, use_embeddings_state],
                outputs=[use_embeddings_state],
            )

            embeddings_text_submit = gr.Button(value="Create", variant="secondary").style(full_width=False)
            embeddings_text_submit.click(
                update_embeddings,
                inputs=[embeddings_text_box, embeddings_state, qa_chain_state],
                outputs=[docsearch_state],
            )

    chat_inputs = [
        openai_api_key_textbox, message, state, agent_state,
        qa_chain_state, docsearch_state, use_embeddings_state,
    ]
    submit.click(chat, inputs=chat_inputs, outputs=[chatbot, state])
    message.submit(chat, inputs=chat_inputs, outputs=[chatbot, state])

    openai_api_key_textbox.change(
        set_openai_api_key,
        inputs=[openai_api_key_textbox],
        outputs=[agent_state, embeddings_state, qa_chain_state],
    )

block.launch(debug=True)
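
# Optional (not in the original file): to also get a temporary public link while
# debugging, launch with block.launch(debug=True, share=True).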