import gradio as gr
import requests
import json

API_URL = "https://api.openai.com/v1/chat/completions"

# Gradio event handler: build the message list from prior turns, send the streaming
# request to the API, and return the updated chat pairs, history, and chat counter.
def predict(inputs, top_p, temperature, openai_api_key, chat_counter, chatbot=[], history=[]):
    messages = format_messages(chatbot, inputs, chat_counter)
    payload = create_payload(messages, top_p, temperature)
    response = make_request(API_URL, openai_api_key, payload)
    history.append(inputs)  # record the user turn so the (user, assistant) pairing stays aligned
    return process_response(response, history, chat_counter)

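# Rebuild the OpenAI message list from the prior chatbot turns, then append the new user input.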
def format_messages(chatbot, inputs, chat_counter):
    messages = []
    if chat_counter != 0:
        for i in range(len(chatbot)):
            user_message = {"role": "user", "content": chatbot[i][0]}
            assistant_message = {"role": "assistant", "content": chatbot[i][1]}
            messages.extend([user_message, assistant_message])
    messages.append({"role": "user", "content": inputs})
    return messages

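# Build the Chat Completions request body; stream=True asks the API to send the reply
# back incrementally as server-sent events rather than as a single response.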
def create_payload(messages, top_p, temperature):
    return {
        "model": "gpt-4-1106-preview",
        "messages": messages,
        "temperature": temperature,
        "top_p": top_p,
        "n": 1,
        "stream": True,
        "presence_penalty": 0,
        "frequency_penalty": 0,
    }

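# POST the payload with a Bearer-token Authorization header; stream=True on the
# requests call keeps the connection open so chunks can be read as they arrive.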
def make_request(url, api_key, payload):
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}"
    }
    response = requests.post(url, headers=headers, json=payload, stream=True)
    return response

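# Consume the streamed response line by line. Each non-empty line looks roughly like
#   data: {"choices": [{"delta": {"content": "..."}}]}
# and the stream ends with a "data: [DONE]" sentinel.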
def process_response(response, history, chat_counter):
    token_counter = 0
    partial_words = ""
    for chunk in response.iter_lines():
        if chunk:
            # Each SSE line is prefixed with "data: "; drop the prefix before JSON parsing
            chunk_str = chunk.decode('utf-8').removeprefix('data: ')
            # Check whether the stream has finished
            if "[DONE]" in chunk_str:
                break
            try:
                chunk_json = json.loads(chunk_str)
                if 'choices' in chunk_json and len(chunk_json['choices']) > 0:
                    chunk_data = chunk_json['choices'][0].get('delta', {})
                    if 'content' in chunk_data:
                        content = chunk_data['content']
                        partial_words += content
                        if token_counter == 0:
                            history.append(" " + partial_words)
                        else:
                            history[-1] = partial_words
                        token_counter += 1
            except json.JSONDecodeError as e:
                print("Error decoding JSON response:", e)
                print("Raw chunk:", chunk_str)
    
    # Re-pair the flat history (user, assistant, user, assistant, ...) into tuples for the Chatbot
    chat = [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)]
    return chat, history, chat_counter + 1


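# Assemble the Gradio Blocks UI: API key box, chatbot display, input box, run button,
# and sampling sliders, all wired to predict() on submit/click.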
def setup_ui():
    with gr.Blocks() as demo:
        with gr.Column():
            openai_api_key = gr.Textbox(type='password', label="Enter your OpenAI API key here")
            chatbot = gr.Chatbot()
            inputs = gr.Textbox(placeholder="Hello!", label="Type an input and press Enter", lines=3)
            state = gr.State([])
            b1 = gr.Button(value="Run", variant="primary")
            
            top_p = gr.Slider(minimum=0, maximum=1.0, value=1.0, step=0.05, label="Top-p")
            temperature = gr.Slider(minimum=0, maximum=1.0, value=1.0, step=0.05, label="Temperature")
            chat_counter = gr.Number(value=0, visible=False)

        inputs.submit(predict, [inputs, top_p, temperature, openai_api_key, chat_counter, chatbot, state], [chatbot, state, chat_counter])
        b1.click(predict, [inputs, top_p, temperature, openai_api_key, chat_counter, chatbot, state], [chatbot, state, chat_counter])

    return demo

def main():
    demo = setup_ui()
    demo.launch()

if __name__ == "__main__":
    main()