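"""Gradio chat UI for The Bot Forge.

Loads a pickled vector store obtained via ingest_data.get_vectorstore, builds a
question-answering chain via query_data.get_chain, and serves the conversation
through a gr.Blocks interface. An OPENAI_API_KEY environment variable is
required to get answers.
"""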
import os
import pickle
from threading import Lock
from typing import Optional, Tuple

import gradio as gr
import openai

from ingest_data import get_vectorstore
from query_data import get_chain
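# Assumed contracts for the local helper modules (their source is not shown here):
#   get_vectorstore(path) should return the path of a pickled vector store, which is
#   unpickled below; get_chain(vectorstore) should return a chain callable with
#   {"question", "chat_history"} and yielding an "answer", or None when no OpenAI
#   API key is available.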


class ChatWrapper:

    def __init__(self):
        self.lock = Lock()

    def __call__(
        self, inp: str, history: Optional[Tuple[str, str]], chain
    ):
        """Execute the chat functionality."""
        # Serialise requests so only one chain call runs at a time.
        with self.lock:
            # get_vectorstore is expected to return the path of the pickled vector store.
            vs = get_vectorstore("vectorstore.pkl")
            with open(vs, "rb") as f:
                vectorstore = pickle.load(f)

            history = history or []
            # A fresh chain is built on every call; the chain passed in via
            # agent_state is ignored.
            chain = get_chain(vectorstore)
            # If chain is None, no OpenAI API key was provided.
            if chain is None:
                history.append((inp, "Please paste your OpenAI key to use"))
                return history, history
            # Set the OpenAI key (raises KeyError if OPENAI_API_KEY is unset).
            openai.api_key = os.environ["OPENAI_API_KEY"]
            # Run the chain and append the exchange to the history.
            output = chain({"question": inp, "chat_history": history})["answer"]
            history.append((inp, output))
        return history, history


chat = ChatWrapper()

block = gr.Blocks(css=".gradio-container {background-color: #04A7E3}")

image_url = "https://www.thebotforge.io/wp-content/uploads/2022/01/the-bot-forge-logo-100px.png"
html = "<center><img src='%s' width='300'></center>" % image_url
with block:
    with gr.Row():
        gr.HTML(html)

    with gr.Row():
        gr.Markdown(
            "<h2><center>The Bot Forge</center></h2>")

    chatbot = gr.Chatbot()

    with gr.Row():
        message = gr.Textbox(
            label="What's your question?",
            placeholder="Ask questions about the conversational AI and The Bot Forge",
            lines=1,
        )
        submit = gr.Button(value="Send", variant="secondary").style(full_width=False)

    gr.Examples(
        examples=[
            "what is a chatbot",
            "what is conversation design",
            "does the bot forge have expertise in this",
            "how much does it cost to create a chatbot",
            "what are some of the channels you can deploy to?",
            "explain nlu",
            "does the bot forge have experience using dialogflow",
            "can a chatbot be used as a sales assistant"

        ],
        inputs=message,
    )

    state = gr.State()
    agent_state = gr.State()

    submit.click(chat, inputs=[message, state, agent_state], outputs=[chatbot, state])


# Standalone example (not wired into the Blocks interface above) showing how a
# handler can inspect request metadata via gr.Request.
def echo(name, request: gr.Request):
    if request:
        print("Request headers dictionary:", request.headers)
        print("IP address:", request.client.host)
        # Note: the raw request body is not exposed as a plain attribute here.
    return name


# To run locally: set the OPENAI_API_KEY environment variable, then execute this
# script; Gradio serves the interface on its default local port.
block.launch(debug=True)