File size: 3,778 Bytes
fe8aaa6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
import gradio as gr
import json
import ssl
import http.client

def get_api_key():
    """Fetch an API key from the admin endpoint.

    Issues a GET to /admin/api-keys/ on test.neuralinternet.ai over a
    certificate-verifying TLS connection and returns the ``api_key`` field
    of the first entry in the JSON array the endpoint responds with.

    Returns:
        str: The first API key returned by the service.

    Raises:
        json.JSONDecodeError: If the response body is not valid JSON.
        KeyError / IndexError: If the JSON shape is not a non-empty list
            of objects with an ``api_key`` field.
    """
    # create_default_context() already enables certificate verification
    # and hostname checking by default.
    context = ssl.create_default_context()

    conn = http.client.HTTPSConnection("test.neuralinternet.ai", context=context)
    try:
        conn.request("GET", "/admin/api-keys/")
        api_key_resp = conn.getresponse()
        # json.loads tolerates structural whitespace, and raw newlines/tabs
        # are not legal inside JSON string values, so no pre-stripping needed.
        api_key_json = json.loads(api_key_resp.read().decode("utf-8"))
        return api_key_json[0]['api_key']
    finally:
        # Ensure the socket is released even if the request or parse fails.
        conn.close()
    
def generate_top_response(system_prompt, model_input, api_key):
    """Ask the /chat endpoint for a response from the top-N miners.

    Sends a chat payload (system + user message) with ``top_n=100`` and
    returns the first choice from the response.

    Args:
        system_prompt: Content for the ``system`` role message.
        model_input: Content for the ``user`` role message.
        api_key: Bearer token for the Authorization header.

    Returns:
        tuple[uid, content] for the first choice, or ``None`` when the
        response contains no choices (matching the original behavior).
    """
    payload = json.dumps(
        {
            "top_n": 100,
            "messages": [
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": model_input},
            ],
        }
    )

    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
        "Endpoint-Version": "2023-05-19",
    }

    context = ssl.create_default_context()

    conn = http.client.HTTPSConnection("test.neuralinternet.ai", context=context)
    try:
        conn.request("POST", "/chat", payload, headers)
        response = conn.getresponse()
        utf_string = response.read().decode("utf-8").replace("\n", "").replace("\t", "")
        print(utf_string)
        json_resp = json.loads(utf_string)
    finally:
        # Close the connection even when the request or JSON parse fails.
        conn.close()

    # Only the first choice is used; returns None implicitly if empty.
    choices = json_resp['choices']
    if choices:
        first = choices[0]
        return first['uid'], first['message']['content']

def generate_benchmark_response(system_prompt, model_input, api_key):
    """Ask the /chat endpoint for a response from the benchmark miners.

    First fetches the current top miner UIDs from /top_miner_uids, then
    posts a chat payload targeting exactly those UIDs and returns the
    first choice from the response.

    Args:
        system_prompt: Content for the ``system`` role message.
        model_input: Content for the ``user`` role message.
        api_key: Bearer token for the Authorization header.

    Returns:
        tuple[uid, content] for the first choice, or ``None`` when the
        response contains no choices (matching the original behavior).
    """
    context = ssl.create_default_context()

    # Step 1: fetch the benchmark UID list.
    conn = http.client.HTTPSConnection("test.neuralinternet.ai", context=context)
    try:
        conn.request("GET", "/top_miner_uids")
        benchmark_uid_resp = conn.getresponse()
        benchmark_uid_string = (
            benchmark_uid_resp.read().decode("utf-8").replace("\n", "").replace("\t", "")
        )
        benchmark_uid_json = json.loads(benchmark_uid_string)
    finally:
        conn.close()

    payload = json.dumps(
        {
            "uids": benchmark_uid_json,
            "messages": [
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": model_input},
            ],
        }
    )

    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
        "Endpoint-Version": "2023-05-19",
    }

    # Step 2: query /chat restricted to those UIDs.
    conn = http.client.HTTPSConnection("test.neuralinternet.ai", context=context)
    try:
        conn.request("POST", "/chat", payload, headers)
        response = conn.getresponse()
        utf_string = response.read().decode("utf-8").replace("\n", "").replace("\t", "")
        json_resp = json.loads(utf_string)
    finally:
        conn.close()

    # Only the first choice is used; returns None implicitly if empty.
    choices = json_resp['choices']
    if choices:
        first = choices[0]
        return first['uid'], first['message']['content']
    
def dynamic_function(system_prompt, prompt):
    """Gradio handler: compare the top-N response against the benchmark one.

    Args:
        system_prompt: Optional system message; when empty, a default
            assistant prompt is substituted.
        prompt: The user's question.

    Returns:
        str: Both responses, labeled with their miner UIDs.
    """
    if not system_prompt:
        # Default assistant persona (typo "consise" fixed to "concise").
        system_prompt = (
            "You are an AI Assistant, created by bittensor and powered by "
            "NI(Neural Internet). Your task is to provide concise response "
            "to user's prompt"
        )
    api_key = get_api_key()
    top_uid, top_response = generate_top_response(system_prompt, prompt, api_key)
    benchmark_uid, benchmark_response = generate_benchmark_response(system_prompt, prompt, api_key)

    return f"TOP_{top_uid}: {top_response}\n\n\nBenchmark_{benchmark_uid}:{benchmark_response}"

# Build the comparison UI. The legacy `gr.inputs` / `gr.outputs` namespaces
# (and the `optional=` kwarg) were removed in modern Gradio; components are
# now constructed directly. An empty System Prompt textbox yields "" which
# dynamic_function treats as "use the default prompt".
interface = gr.Interface(
    fn=dynamic_function,
    inputs=[
        gr.Textbox(label="System Prompt"),
        gr.Textbox(label="Enter your question"),
    ],
    outputs=gr.Textbox(label="Responses"),
    title="Bittensor Compare Util",
)


# Launch the Gradio Interface with request queueing enabled
# (`launch(enable_queue=...)` was replaced by `.queue()` in modern Gradio).
interface.queue().launch(share=False)