add jwt token
app.py
CHANGED
@@ -3,8 +3,8 @@ from blindbox.requests import SecureSession
 
 DEMO_SERVER = "4.208.9.167:80"
 text = "<h1 style='text-align: center; color: white;'>🎅 SantaCoder with <span style='color: #f0ba2d;'>BlindBox:</span> Private Code Generation </h1><p style='text-align: left; color: white;'>This is our demo for our <a style='color: #f0ba2d;', href='https://blog-mithril-security.ghost.io/ai-assisted-code-generation-with-privacy-guarantees-securely-deploy-santacoder-with-blindbox'>article</a> on deploying code generation LLM models with BlindBox. The user input is <a style='color: #f0ba2d;', href='https://blindbox.mithrilsecurity.io/en/latest/docs/getting-started/confidential_computing/'>protected during computation</a> by leveraging state-of-the-art <a style='color: #f0ba2d;', href='https://www.ibm.com/topics/confidential-computing'> Confidential Computing technologies.</a> This means that data sent to the Santacoder model is never accessible to the service provider during computation - private code remains private!</p><p style='text-align: left; color: white;'>You can see how we deployed the model in the integration section of our <a style='color: #f0ba2d;', href='https://blindbox.mithrilsecurity.io/en/latest/docs/how-to-guides/santacoder/'>documentation!</a></p><p style='text-align: left; color: white;'>⚠️ BlindBox is still under development. Do not test with production data!</p>"
-bullets = "<ul><li style='color: white;'>✅ Connection verified by attestation</li><li style='color: white;'>✅ Application deployed on Confidential VM</li></ul><p style='color: white;'>Features coming soon:</p><ul><li style='color: white;'>❌ TLS</li><li style='color: white;'>❌ Network isolation</li></ul>"
-
+bullets = "<ul><li style='color: white;'>✅ Connection verified by <a style='color: #f0ba2d;', href='https://blindbox.mithrilsecurity.io/en/latest/docs/security/attestation'>attestation</a></li><li style='color: white;'>✅ Application deployed on Confidential VM</li></ul><p style='color: white;'>Features coming soon:</p><ul><li style='color: white;'>❌ TLS</li><li style='color: white;'>❌ Network isolation</li></ul>"
+token_info = "<p style='color: white;'> Find out more about the MAA attestation token <a style='color: #f0ba2d;', href='https://blindbox.mithrilsecurity.io/en/latest/docs/security/attestation/#maa-attestation-token'>here!</a>"
 def run_query( server, prompt):
     if server == "Non-confidential VM server":
         return ("❌ Error: you can only connect to an application running on a Confidential VM")
@@ -12,12 +12,13 @@ def run_query( server, prompt):
     if prompt == None:
         return ("❌ Error: please provide input code")
     message = "\n\n✅ Secure query successful"
+    message2 = "✅ Attestation validated\n"
     try:
         with SecureSession(f"http://{DEMO_SERVER}", POLICY) as secure_session:
             res = secure_session.post(endpoint="/generate", json={"input_text": prompt})
             cleaned = res.text.replace('\\n', '\n').split('\n\n')[0].split(':"')[1]
             cleaned = cleaned.replace('\\', '')
-            return(cleaned + message)
+            return(cleaned + message, message2 + secure_session.jwt)
     except Exception as err:
         return(f"❌ Query failed!\n{err}")
 
@@ -36,7 +37,10 @@ with demo:
         trigger = gr.Button(label="Run query")
     with gr.Column():
         output = gr.Textbox(placeholder="Output", label="Output")
-
+        with gr.Accordion("Attestation token (signed JWT token): ", open=False):
+            output2 = gr.Textbox(placeholder="Attestation token", label="Output")
+            gr.Markdown(value=token_info)
+    trigger.click(fn=run_query, inputs=[server, prompt], outputs=[output, output2])
     gr.HTML(label="Contact", value="<img src='https://github.com/mithril-security/blindbox/blob/laura-images/docs/assets/contact-us.png?raw=true.png' alt='contact' style='display: block; margin: auto; max-width: 600px;'>")
 if __name__ == "__main__":
     demo.launch()
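
The new accordion surfaces the raw attestation token that the secure session exposes as secure_session.jwt. Assuming it follows the standard signed JWT layout (three base64url segments: header, payload, signature), a minimal sketch like the one below could decode the header and claims locally for inspection. It does not verify the signature, and the commented usage line is illustrative, not part of app.py.

import base64
import json

def decode_jwt_unverified(token: str) -> dict:
    """Decode a JWT's header and payload for inspection only.

    Assumes the standard three-part base64url layout; the signature
    is NOT verified, so this must not be used to trust the token.
    """
    def b64url_decode(segment: str) -> bytes:
        # base64url segments drop padding; restore it before decoding
        padding = "=" * (-len(segment) % 4)
        return base64.urlsafe_b64decode(segment + padding)

    header_b64, payload_b64, _signature_b64 = token.split(".")
    return {
        "header": json.loads(b64url_decode(header_b64)),
        "claims": json.loads(b64url_decode(payload_b64)),
    }

# Hypothetical usage with the value shown in the "Attestation token" textbox:
# print(decode_jwt_unverified(secure_session.jwt)["claims"])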
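Because trigger.click now lists two outputs, run_query returns one value per output component and Gradio maps the returned tuple onto [output, output2] in order. A minimal, self-contained sketch of that pattern follows; the component and function names here are illustrative and not the ones used in app.py.

import gradio as gr

def answer(prompt):
    # First value fills the first output component, second value the second.
    return f"echo: {prompt}", "✅ status: ok"

with gr.Blocks() as sketch:
    prompt_box = gr.Textbox(label="Prompt")
    run_btn = gr.Button("Run")
    result_box = gr.Textbox(label="Result")
    status_box = gr.Textbox(label="Status")
    run_btn.click(fn=answer, inputs=[prompt_box], outputs=[result_box, status_box])

if __name__ == "__main__":
    sketch.launch()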