import gradio as gr
import requests
import os

API_KEY = os.getenv('API_KEY')
INVOKE_URL = "https://api.nvcf.nvidia.com/v2/nvcf/pexec/functions/df2bee43-fb69-42b9-9ee5-f4eabbeaf3a8"
FETCH_URL_FORMAT = "https://api.nvcf.nvidia.com/v2/nvcf/pexec/status/"
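# Note: the pexec "functions" endpoint may answer 202 Accepted while the request is
# still being processed; in that case the code below polls FETCH_URL_FORMAT plus the
# NVCF-REQID response header until a final response arrives (see call_nvidia_api).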

headers = {
    "Authorization": f"Bearer {API_KEY}",
    "Accept": "application/json",
    "Content-Type": "application/json",
}

BASE_SYSTEM_MESSAGE = "I carefully provide accurate, factual, thoughtful, nuanced answers and am brilliant at reasoning."

def clear_chat(chat_history_state, chat_message):
    print("Clearing chat...")
    chat_history_state = []
    chat_message = ''
    return chat_history_state, chat_message

def user(message, history):
    print(f"User message: {message}")
    history = history or []
    history.append({"role": "user", "content": message})
    return history

def call_nvidia_api(history, max_tokens, temperature, top_p):
    payload = {
        "messages": history,
        "temperature": temperature,
        "top_p": top_p,
        "max_tokens": max_tokens,
        "stream": False
    }

    session = requests.Session()
    response = session.post(INVOKE_URL, headers=headers, json=payload)
    # Poll the status endpoint while the API reports 202 (request still processing)
    while response.status_code == 202:
        request_id = response.headers.get("NVCF-REQID")
        fetch_url = FETCH_URL_FORMAT + request_id
        response = session.get(fetch_url, headers=headers)
    
    response.raise_for_status()
    response_body = response.json()
    
    # Append the assistant's reply from the API response to the history
    if response_body["choices"]:
        assistant_message = response_body["choices"][0]["message"]["content"]
        history.append({"role": "assistant", "content": assistant_message})
    
    return history
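
# A minimal, hypothetical usage sketch of call_nvidia_api (assumes API_KEY is set and
# the function endpoint above is reachable); the assistant reply is appended in place:
#
#   history = [{"role": "user", "content": "Hello!"}]
#   history = call_nvidia_api(history, max_tokens=128, temperature=0.7, top_p=0.95)
#   print(history[-1]["content"])   # last entry is the assistant's message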

def chat(history, system_message, max_tokens, temperature, top_p, top_k, repetition_penalty):
    print("Starting chat...")
    # Prepend the system prompt once, assuming the endpoint accepts an OpenAI-style
    # "system" role; top_k and repetition_penalty are kept for signature parity but
    # are not forwarded to this API.
    if system_message and (not history or history[0]["role"] != "system"):
        history = [{"role": "system", "content": system_message}] + history
    updated_history = call_nvidia_api(history, max_tokens, temperature, top_p)
    return updated_history, ""

# Gradio interface setup
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            gr.Markdown("## Your Chatbot Interface")
    chatbot = gr.Chatbot()
    message = gr.Textbox(label="What do you want to chat about?", placeholder="Ask me anything.", lines=3)
    submit = gr.Button(value="Send message")
    clear = gr.Button(value="New topic")
    system_msg = gr.Textbox(BASE_SYSTEM_MESSAGE, label="System Message", placeholder="System prompt.", lines=5)
    max_tokens = gr.Slider(20, 512, label="Max Tokens", step=20, value=500)
    temperature = gr.Slider(0.0, 1.0, label="Temperature", step=0.1, value=0.7)
    top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.95)
    chat_history_state = gr.State([])

    def update_chatbot(message, chat_history, system_message, max_tokens, temperature, top_p):
        print("Updating chatbot...")
        # Ensure the user's message is not added twice
        if not chat_history or chat_history[-1]["role"] != "user":
            chat_history = user(message, chat_history)
        chat_history, _ = chat(chat_history, system_message, max_tokens, temperature, top_p, 40, 1.1)
        # Format the history as (user, assistant) pairs for the gr.Chatbot display
        formatted_chat_history = []
        for msg in chat_history:
            if msg["role"] == "user":
                formatted_chat_history.append([msg["content"], None])
            elif msg["role"] == "assistant" and formatted_chat_history:
                formatted_chat_history[-1][1] = msg["content"]
        return formatted_chat_history, chat_history, ""


    submit.click(
        fn=update_chatbot,
        inputs=[message, chat_history_state, system_msg, max_tokens, temperature, top_p],
        outputs=[chatbot, chat_history_state, message]
    )

    clear.click(
        fn=clear_chat,
        inputs=[chat_history_state, message],
        outputs=[chat_history_state, message]
    )

demo.launch()
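
# Running locally (a sketch; the filename and key value are placeholders):
#   export API_KEY="<your NVIDIA API key>"
#   python app.py
# Gradio prints the local URL it serves on (typically http://127.0.0.1:7860) at startup.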