import gradio as gr
from blindbox.requests import SecureSession
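# SecureSession is BlindBox's wrapper for HTTP requests: it verifies the remote
# confidential VM's attestation against a policy before sending any data.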

DEMO_SERVER = "4.208.9.167:80"
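# Public IP and port of the SantaCoder server deployed with BlindBox inside a
# confidential VM for this demo.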

def run_query(server, prompt):
    # Both radio selections (steps 1 and 2) are required before querying.
    if server is None or prompt is None:
        return "⛔ Error: please select an option for steps 1 and 2"
    if server != "Authentic and verified confidential VM server":
        return "⛔ Error: you can only connect to an application running on a confidential VM"
    POLICY = "./cce_policy.txt"
    try:
        with SecureSession(f"http://{DEMO_SERVER}", POLICY) as secure_session:
            res = secure_session.post(endpoint="/generate", json={"input_text": prompt})
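            # Crude extraction of the generated code from the raw JSON response:
            # unescape newlines, keep the first completion, drop the JSON key prefix.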
            cleaned = res.text.replace('\\n', '\n').split('\n\n')[0].split(':"')[1]
            return("✅ Query successful\n" + cleaned)
    except Exception as err:
        return(f"⛔ Query failed!\n{err}")

with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("<h1><center>🔒Confidential code generation with BlindBox and Santacoder</center></h1>")
    gr.Markdown("<p>This is the demo for our article on deploying code generation LLM models with BlindBox: <b>AI-assisted code generation with privacy guarantees: Securely deploy SantaCoder with BlindBox</b><br>You can view the article  <a href='https://blog-mithril-security.ghost.io/ai-assisted-code-generation-with-privacy-guarantees-securely-deploy-santacoder-with-blindbox'>here!</a></p>")
    gr.Markdown("<p>You can use this demo to send a function definition to BigCode's open-source Santacoder model and get back an auto-completed function.</p>")
    gr.Markdown("<p>The model is deployed within a highly-isolated Trusted Execution Environment, meaning that we, as the service provider, have no access to the data sent to this model!</p>")
    gr.Markdown("<p>You can see how we deployed the model by checking out the integration section of our <a href='https://blindbox.mithrilsecurity.io/en/latest/docs/how-to-guides/santacoder/'>documentation!</p>")
    gr.Markdown("><h3>Step 1: Check that we are connecting to an authentic confidential VM")
    gr.Markdown("<p>This first option allows you to choose whether to connect to the Santacoder application deployed with BlindBox on a verified confidential VM or the same application deployed on a dummy server which is not within a confidential VM!<br>This demonstrates how BlindBox blocks requests to non-authentic confidential VMs!</p>")
    with gr.Column():
        server = gr.Radio(
            ["Authentic and verified confidential VM server", "Unauthentic dummy server"],
            label="Select the server you want to connect to",
        )
    gr.Markdown("><h3>Step 2: Select your prompt</h3>")
    gr.Markdown("<p>Select between the following prompt examples we provide.</p>")
    with gr.Column():
        prompt = gr.Radio(
            ["def sum(x, y):", "def print_name(name):", "def hello_world():", "def square_root(nbr):"],
            label="Select your user prompt",
        )
    gr.Markdown("><h3>Step 3: Query the Santacoder model</h3>")
    with gr.Column():
        trigger = gr.Button("Test query")
    with gr.Column():
        output = gr.Textbox(placeholder="Output", label="See the output of your query here")
    trigger.click(fn=run_query, inputs=[server, prompt], outputs=output)
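
# Running `python app.py` launches the Gradio UI locally (on http://127.0.0.1:7860 by default).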

if __name__ == "__main__":
    demo.launch()