File size: 3,285 Bytes
44c4d91
c35600d
44c4d91
 
8b1f0bb
44c4d91
 
8b1f0bb
24d20df
 
 
 
44c4d91
24d20df
 
 
45afa26
3af3a0d
 
 
 
 
 
44c4d91
 
 
 
 
 
3af3a0d
 
 
44c4d91
 
 
 
3af3a0d
 
44c4d91
 
 
 
3af3a0d
44c4d91
 
3af3a0d
79aceb3
 
 
3af3a0d
44c4d91
3af3a0d
44c4d91
 
3af3a0d
 
 
44c4d91
 
3af3a0d
44c4d91
 
3af3a0d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
44c4d91
3af3a0d
 
 
 
 
 
 
 
 
 
 
 
44c4d91
a89fdf4
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
import os

from openai import OpenAI
import gradio as gr

# API key is read from the environment; None if the variable is unset.
api_key = os.environ.get('OPENAI_API_KEY')
# Shared OpenAI client used by `generate` below.
client = OpenAI(api_key=api_key)

# Chat models offered in the UI dropdown; the first entry is the default.
MODELS = [
    'gpt-4o',
    'gpt-4o-mini',
    'gpt-4',
    'gpt-4-turbo',
    'gpt-3.5-turbo',
]


def generate(message, history, model, system_prompt,
             temperature=1.0, top_p=1.0, frequency_penalty=0.0, presence_penalty=0.0):
    """Stream an assistant reply from the OpenAI chat-completions API.

    Builds an OpenAI-format message list from the system prompt, the
    (user, assistant) history pairs, and the new user message, then yields
    the progressively accumulated reply text as stream chunks arrive.
    """
    # assumes history is a list of (user, assistant) string pairs — the
    # tuple-style Gradio chat history; TODO confirm against the gradio version in use
    messages = [{"role": "system", "content": system_prompt}]
    for user_turn, assistant_turn in history:
        messages.extend([
            {"role": "user", "content": user_turn},
            {"role": "assistant", "content": assistant_turn},
        ])
    messages.append({"role": "user", "content": message})

    stream = client.chat.completions.create(
        model=model,
        messages=messages,
        temperature=temperature,
        top_p=top_p,
        frequency_penalty=frequency_penalty,
        presence_penalty=presence_penalty,
        stream=True,
    )

    reply = ""
    for chunk in stream:
        # Skip keep-alive/empty chunks: `choices` may be empty and the final
        # delta's content may be None.
        if chunk.choices and chunk.choices[0].delta.content is not None:
            reply += chunk.choices[0].delta.content
            yield reply


# Assemble the generation-parameter widgets shown in the collapsible
# "Instellingen" accordion; their order must match `generate`'s extra
# positional parameters (model, system_prompt, temperature, top_p,
# frequency_penalty, presence_penalty).
model_dropdown = gr.Dropdown(label="Model",
                             choices=MODELS,
                             value=MODELS[0],
                             allow_custom_value=False)
system_prompt_box = gr.Textbox(
    label="System prompt",
    value="Je bent een slimme, behulpzame assistent van Edwin Rijgersberg")
temperature_slider = gr.Slider(label="Temperature",
                               minimum=0., maximum=2.0, step=0.05, value=1.0)
top_p_slider = gr.Slider(label="Top P",
                         minimum=0., maximum=1.0, step=0.05, value=1.0)
frequency_penalty_slider = gr.Slider(label="Frequency penalty",
                                     minimum=0., maximum=1.0, step=0.05, value=0.)
presence_penalty_slider = gr.Slider(label="Presence penalty",
                                    minimum=0., maximum=1.0, step=0.05, value=0.)

# The chat UI itself; `generate` streams partial replies into the chatbot.
chat_interface = gr.ChatInterface(
    fn=generate,
    title='💬 Private ChatGPT',
    description='Chat with OpenAI models using their official API. OpenAI <a href="https://platform.openai.com/docs/concepts">promises</a> not to train on input or output of API calls.',
    analytics_enabled=False,
    chatbot=gr.Chatbot(show_label=False,
                       show_copy_button=True,
                       scale=1),
    textbox=gr.Textbox(container=False,
                       show_label=False,
                       label="Message",
                       placeholder="Type een bericht...",
                       scale=7),
    additional_inputs=[
        model_dropdown,
        system_prompt_box,
        temperature_slider,
        top_p_slider,
        frequency_penalty_slider,
        presence_penalty_slider,
    ],
    additional_inputs_accordion=gr.Accordion(label="Instellingen", open=False),
    show_progress="full",
    submit_btn="Genereer",
    stop_btn="Stop",
    retry_btn="🔄 Opnieuw",
    undo_btn="↩️ Ongedaan maken",
    clear_btn="🗑️ Wissen",
)
# NOTE(review): share=True publishes a public *.gradio.live URL — confirm this
# is intended for an app titled "Private ChatGPT".
chat_interface.launch(share=True)