import gradio as gr
import requests
import json
import os
API_KEY = os.getenv('API_KEY')
INVOKE_URL = "https://api.nvcf.nvidia.com/v2/nvcf/pexec/functions/df2bee43-fb69-42b9-9ee5-f4eabbeaf3a8"
FETCH_URL_FORMAT = "https://api.nvcf.nvidia.com/v2/nvcf/pexec/status/"
headers = {
    "Authorization": f"Bearer {API_KEY}",
    "Accept": "application/json",
    "Content-Type": "application/json",
}
BASE_SYSTEM_MESSAGE = "I carefully provide accurate, factual, thoughtful, nuanced answers and am brilliant at reasoning."
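# Note: the API key must be present in the environment (e.g. as a host/Space secret) before launch.
# Illustrative shell usage (placeholder value and assumed filename, not part of the original app):
#   export API_KEY="<your NVIDIA NVCF API key>"
#   python app.py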
def clear_chat(chat_history_state, chat_message):
    print("Clearing chat...")
    chat_history_state = []
    chat_message = ''
    return chat_history_state, chat_message
def user(message, history, system_message=None):
    print(f"User message: {message}")
    history = history or []
    if system_message:  # Check if a system message is provided and should be added
        history.append({"role": "system", "content": system_message})
    history.append({"role": "user", "content": message})
    return history
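# Illustrative result of user("Hi", [], BASE_SYSTEM_MESSAGE):
#   [{"role": "system", "content": BASE_SYSTEM_MESSAGE},
#    {"role": "user", "content": "Hi"}]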
def call_nvidia_api(history, max_tokens, temperature, top_p):
    payload = {
        "messages": history,
        "temperature": temperature,
        "top_p": top_p,
        "max_tokens": max_tokens,
        "stream": False
    }
    print(f"Payload sent: {payload}")  # Log the outgoing payload
    session = requests.Session()
    response = session.post(INVOKE_URL, headers=headers, json=payload)
    # Poll the status endpoint while the function invocation is still pending (HTTP 202)
    while response.status_code == 202:
        request_id = response.headers.get("NVCF-REQID")
        fetch_url = FETCH_URL_FORMAT + request_id
        response = session.get(fetch_url, headers=headers)
    response.raise_for_status()
    response_body = response.json()
    print(f"Payload received: {response_body}")  # Log the incoming payload
    if response_body["choices"]:
        assistant_message = response_body["choices"][0]["message"]["content"]
        history.append({"role": "assistant", "content": assistant_message})
    return history
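# Illustrative standalone call (assumes a valid API_KEY and network access; not part of the app flow):
#   history = [{"role": "user", "content": "Hello"}]
#   history = call_nvidia_api(history, max_tokens=128, temperature=0.7, top_p=0.95)
#   print(history[-1]["content"])  # assistant reply appended by the function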
def chat(history, system_message, max_tokens, temperature, top_p, top_k, repetition_penalty):
    print("Starting chat...")
    # top_k and repetition_penalty are accepted for interface compatibility but not forwarded to the NVIDIA API
    updated_history = call_nvidia_api(history, max_tokens, temperature, top_p)
    return updated_history, ""
# Gradio interface setup
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            gr.Markdown("## Your Chatbot Interface")
            chatbot = gr.Chatbot()
            message = gr.Textbox(label="What do you want to chat about?", placeholder="Ask me anything.", lines=3)
            submit = gr.Button(value="Send message")
            clear = gr.Button(value="New topic")
            system_msg = gr.Textbox(BASE_SYSTEM_MESSAGE, label="System Message", placeholder="System prompt.", lines=5)
            max_tokens = gr.Slider(20, 512, label="Max Tokens", step=20, value=500)
            temperature = gr.Slider(0.0, 1.0, label="Temperature", step=0.1, value=0.7)
            top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.95)

    chat_history_state = gr.State([])
    def update_chatbot(message, chat_history, system_message=BASE_SYSTEM_MESSAGE):
        print("Updating chatbot...")
        # Attach the system message only on the very first turn
        if not chat_history or (chat_history and chat_history[-1]["role"] != "user"):
            chat_history = user(message, chat_history, system_message if not chat_history else None)
        else:
            chat_history = user(message, chat_history)
        chat_history, _ = chat(chat_history, system_message, max_tokens.value, temperature.value, top_p.value, 40, 1.1)
        # Convert the role/content history into the [user, assistant] pairs expected by gr.Chatbot
        formatted_chat_history = []
        last_role = None
        for entry in chat_history:
            role = entry["role"]
            content = entry["content"].strip()  # Trim whitespace to check for empty content
            if content:  # Only proceed if content is not empty
                if role != last_role:
                    if role == "user":
                        formatted_chat_history.append([content, ""])
                    elif role == "assistant":
                        formatted_chat_history.append(["", content])
                    last_role = role
                else:
                    if role == "user" and content:
                        formatted_chat_history[-1][0] += "\n" + content
                    elif role == "assistant" and content:
                        formatted_chat_history[-1][1] += "\n" + content
        return formatted_chat_history, chat_history, ""
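    # Illustrative shape of formatted_chat_history as returned to gr.Chatbot (list of [user, assistant] pairs):
    #   [["Hello", "Hi there! How can I help?"], ["Summarize X", "..."]]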
    submit.click(
        fn=update_chatbot,
        inputs=[message, chat_history_state],
        outputs=[chatbot, chat_history_state, message]
    )
    clear.click(
        fn=clear_chat,
        inputs=[chat_history_state, message],
        outputs=[chat_history_state, message]
    )
demo.launch()