import gradio as gr
import requests
import json
import os
API_KEY = os.getenv('API_KEY')
INVOKE_URL = "https://api.nvcf.nvidia.com/v2/nvcf/pexec/functions/df2bee43-fb69-42b9-9ee5-f4eabbeaf3a8"
FETCH_URL_FORMAT = "https://api.nvcf.nvidia.com/v2/nvcf/pexec/status/"
headers = {
    "Authorization": f"Bearer {API_KEY}",
    "Accept": "application/json",
    "Content-Type": "application/json",
}
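# The NVCF "pexec" API is asynchronous: the initial POST to INVOKE_URL can return
# HTTP 202 with an NVCF-REQID header, and the final result is then retrieved by
# polling FETCH_URL_FORMAT + request_id (see call_nvidia_api below).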
BASE_SYSTEM_MESSAGE = "I carefully provide accurate, factual, thoughtful, nuanced answers and am brilliant at reasoning."
def clear_chat(chat_history_state, chat_message):
    print("Clearing chat...")
    chat_history_state = []
    chat_message = ''
    return chat_history_state, chat_message
def user(message, history):
    print(f"User message: {message}")
    history = history or []
    history.append({"role": "user", "content": message})
    return history
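# Conversation state is a list of {"role", "content"} dicts; the same list is sent
# unchanged as the "messages" field of the API payload in call_nvidia_api().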
def call_nvidia_api(history, max_tokens, temperature, top_p):
    payload = {
        "messages": history,
        "temperature": temperature,
        "top_p": top_p,
        "max_tokens": max_tokens,
        "stream": False
    }
    session = requests.Session()
    response = session.post(INVOKE_URL, headers=headers, json=payload)
    # Poll the status endpoint while the request is still being processed (HTTP 202)
    while response.status_code == 202:
        request_id = response.headers.get("NVCF-REQID")
        fetch_url = FETCH_URL_FORMAT + request_id
        response = session.get(fetch_url, headers=headers)
    response.raise_for_status()
    response_body = response.json()
    # Append the assistant's reply to the conversation history
    if response_body["choices"]:
        assistant_message = response_body["choices"][0]["message"]["content"]
        history.append({"role": "assistant", "content": assistant_message})
    return history
def chat(history, system_message, max_tokens, temperature, top_p, top_k, repetition_penalty):
    print("Starting chat...")
    updated_history = call_nvidia_api(history, max_tokens, temperature, top_p)
    return updated_history, ""
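# Note: system_message, top_k and repetition_penalty are accepted above but are not
# forwarded to the API; call_nvidia_api() only sends max_tokens, temperature and top_p.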
# Gradio interface setup
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            gr.Markdown("## Your Chatbot Interface")
            chatbot = gr.Chatbot()
            message = gr.Textbox(label="What do you want to chat about?", placeholder="Ask me anything.", lines=3)
            submit = gr.Button(value="Send message")
            clear = gr.Button(value="New topic")
            system_msg = gr.Textbox(BASE_SYSTEM_MESSAGE, label="System Message", placeholder="System prompt.", lines=5)
            max_tokens = gr.Slider(20, 512, label="Max Tokens", step=20, value=500)
            temperature = gr.Slider(0.0, 1.0, label="Temperature", step=0.1, value=0.7)
            top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.95)
    chat_history_state = gr.State([])
    def update_chatbot(message, chat_history, system_message, max_tokens, temperature, top_p):
        print("Updating chatbot...")
        # Ensure the user's message is not added twice
        if not chat_history or chat_history[-1]["role"] != "user":
            chat_history = user(message, chat_history)
        chat_history, _ = chat(chat_history, system_message, max_tokens, temperature, top_p, 40, 1.1)
        # gr.Chatbot expects (user, assistant) pairs, so strip the role labels and
        # pair each user message with the assistant reply that follows it.
        formatted_chat_history = []
        for msg in chat_history:
            if msg["role"] == "user":
                formatted_chat_history.append([msg["content"], None])
            elif msg["role"] == "assistant" and formatted_chat_history:
                formatted_chat_history[-1][1] = msg["content"]
        return formatted_chat_history, chat_history, ""
    # Component values are passed as event inputs so update_chatbot sees their
    # current values rather than the components' initial defaults.
    submit.click(
        fn=update_chatbot,
        inputs=[message, chat_history_state, system_msg, max_tokens, temperature, top_p],
        outputs=[chatbot, chat_history_state, message]
    )
    clear.click(
        fn=clear_chat,
        inputs=[chat_history_state, message],
        outputs=[chat_history_state, message]
    )
demo.launch()
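# To run locally (a minimal sketch): set the API_KEY environment variable to a valid
# NVIDIA NVCF key, then start the script; demo.launch() serves the UI on Gradio's
# default local address (http://127.0.0.1:7860 unless configured otherwise).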