Update app.py
app.py
CHANGED
@@ -8,7 +8,15 @@ def predict(inputs, top_p, temperature, openai_api_key, chat_counter, chatbot=[]
     messages = format_messages(chatbot, inputs, chat_counter)
     payload = create_payload(messages, top_p, temperature)
     response = make_request(API_URL, openai_api_key, payload)
-
+
+    # Process the response
+    chat, new_history, token_counter = process_response(response, history)
+
+    # Update the history only if new tokens arrived
+    if token_counter > 0:
+        history = new_history
+
+    return chat, history, token_counter

 def format_messages(chatbot, inputs, chat_counter):
     messages = []
@@ -46,7 +54,6 @@ def process_response(response, history):
     for chunk in response.iter_lines():
         if chunk:
             chunk_str = chunk.decode('utf-8').lstrip('data: ')
-            # Check whether the stream has finished
             if "[DONE]" in chunk_str:
                 break
             try:
@@ -64,14 +71,12 @@ def process_response(response, history):
             except json.JSONDecodeError as e:
                 print("Error decoding JSON response:", e)
                 print("Raw chunk:", chunk_str)
-
-    # Finish building the response
+
     chat = [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)]
     return chat, history, token_counter



-
 def setup_ui():
     with gr.Blocks() as demo:
         with gr.Column():
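The refactored predict now just chains format_messages, create_payload, make_request, and process_response. Those helpers are not shown in this commit; below is a minimal sketch of what create_payload and make_request could look like for a streaming chat request. The endpoint, model name, and header layout are assumptions based on the public OpenAI API, not content from this diff.

import requests

# Assumption: the Space targets the standard chat-completions endpoint.
API_URL = "https://api.openai.com/v1/chat/completions"

def create_payload(messages, top_p, temperature):
    # Sketch only: build a streaming chat-completions request body.
    return {
        "model": "gpt-3.5-turbo",   # assumed model, not shown in the diff
        "messages": messages,
        "temperature": temperature,
        "top_p": top_p,
        "stream": True,             # streaming is implied by response.iter_lines()
    }

def make_request(api_url, openai_api_key, payload):
    # Sketch only: POST with the user's key and keep the connection streaming.
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {openai_api_key}",
    }
    return requests.post(api_url, headers=headers, json=payload, stream=True)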
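For reference, the loop that the second and third hunks touch follows the OpenAI server-sent-events format. The sketch below shows one way the surrounding process_response body could fit together; the choices[0]["delta"]["content"] access and the way the partial reply is folded into history are assumptions, since the commit only shows fragments of the function.

import json

def process_response(response, history):
    # Sketch only: accumulate streamed tokens from an OpenAI-style SSE
    # response and pair the flat history into (user, assistant) tuples.
    token_counter = 0
    partial = ""
    for chunk in response.iter_lines():
        if not chunk:
            continue
        chunk_str = chunk.decode("utf-8")
        if chunk_str.startswith("data: "):
            chunk_str = chunk_str[len("data: "):]   # strip the SSE prefix
        if "[DONE]" in chunk_str:
            break
        try:
            delta = json.loads(chunk_str)["choices"][0]["delta"]
            partial += delta.get("content", "")
            token_counter += 1
        except json.JSONDecodeError as e:
            print("Error decoding JSON response:", e)
            print("Raw chunk:", chunk_str)
    if token_counter > 0:
        history = history + [partial]   # assumed: the user turn was appended earlier
    chat = [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)]
    return chat, history, token_counter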