File size: 2,496 Bytes
1f62c32
 
b61f27b
 
 
 
1f62c32
b61f27b
 
 
 
1f62c32
 
b61f27b
1f62c32
 
 
b61f27b
 
 
 
 
477ff0c
 
 
 
3eda7dc
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
477ff0c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
from brander import prompting

import gradio as gr
import openai


def greet(topic: str):
    """Ask the model about *topic* and return its reply as plain text.

    Sends a one-shot (non-streaming) chat completion: a system prompt built
    from the project template, one worked example (input + output), and the
    user's topic.
    """
    messages = [
        {
            "role": "system",
            "content": prompting.PROMPT_TEMPLATE.format(topic=topic),
        },
        {"role": "user", "content": prompting.EXAMPLE_INPUT},
        {"role": "assistant", "content": prompting.EXAMPLE_OUTPUT},
        {"role": "user", "content": topic},
    ]
    completion = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=messages,
    )
    # First (and only) choice carries the assistant's reply.
    return completion.choices[0].message.content


# interface = gr.Interface(fn=greet, inputs="text", outputs="text")
# interface.launch()


def gradio_history_to_openai_history(gradio_history: list[list[str]]):
    """Translate Gradio chat history into the OpenAI messages format.

    Gradio stores history as ``[[user_text, assistant_text], ...]`` where the
    assistant slot of the newest pair may still be ``None``/empty. The result
    is prefixed with the system prompt and one few-shot example pair.
    """
    messages = [
        {
            "role": "system",
            "content": prompting.PROMPT_TEMPLATE,
        },
        {"role": "user", "content": prompting.EXAMPLE_INPUT},
        {"role": "assistant", "content": prompting.EXAMPLE_OUTPUT},
    ]

    for user_text, assistant_text in gradio_history:
        messages.append({"role": "user", "content": user_text})
        # Skip the assistant slot when it hasn't been filled in yet
        # (the pending turn we are about to generate).
        if assistant_text:
            messages.append({"role": "assistant", "content": assistant_text})

    return messages


def bot(history: list[list[str]]):
    """Stream the assistant's reply into the last history slot.

    Generator used as a Gradio event handler: it repeatedly yields the
    mutated ``history`` so the Chatbot component re-renders as tokens
    arrive. Assumes ``history`` is non-empty and its last pair is
    ``[user_message, None]`` (set up by the ``user`` handler).
    """
    try:
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=gradio_history_to_openai_history(history),
            stream=True,
        )
    except Exception as e:
        # An openai.error.RateLimitError can happen,
        # but we can also catch other exceptions just in case.
        history[-1][1] = f"[ERROR] {type(e)}: {e}"
        # BUG FIX: this function is a generator, so `return history` only set
        # StopIteration.value, which Gradio discards — the error message was
        # never shown. We must *yield* the updated history, then stop.
        yield history
        return

    history[-1][1] = ""
    for chunk in response:
        choice = chunk.choices[0]
        if choice.finish_reason is not None:
            break

        # The first chunk just says that the role is "assistant"
        # and doesn't have any content (text).
        if hasattr(choice.delta, "content"):
            history[-1][1] += choice.delta.content

        yield history


# UI wiring: a chat window, a message box, and a clear button.
with gr.Blocks() as interface:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def user(user_message, history):
        # Append the new user turn with an empty assistant slot (None) and
        # clear the textbox; `bot` then streams the reply into that slot.
        return "", history + [[user_message, None]]

    # On submit: record the user turn immediately (queue=False so it isn't
    # delayed by the queue), then chain the streaming `bot` handler.
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    # Reset the chat window by returning None into the Chatbot component.
    clear.click(lambda: None, None, chatbot, queue=False)

# Queueing is required for generator (streaming) handlers like `bot`.
interface.queue()
# demo.launch()