import gradio as gr

from blindbox.requests import SecureSession

# Address of the BlindBox demo inference server.
DEMO_SERVER = "4.208.9.167:80"


def run_query(prompt):
    # Attestation policy the remote enclave must match before any data is sent.
    POLICY = "./cce_policy.txt"
    if len(prompt) == 0:
        return "⛔ Error: please add your query for automated code completion"
    try:
        with SecureSession(f"http://{DEMO_SERVER}", POLICY) as secure_session:
            res = secure_session.post(endpoint="/generate", json={"input_text": prompt})
            # Keep only the first completion block from the raw response text.
            cleaned = res.text.replace('\\n', '\n').split('\n\n')[0].split(':"')[1]
            return "✅ Query successful\n" + cleaned
    except Exception as err:
        return f"⛔ Query failed!\n{err}"


with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown(
        """
This is the demo for our article on deploying code-generation LLMs with BlindBox: *AI-assisted code generation with privacy guarantees: Securely deploy SantaCoder with BlindBox*.

You can view the article here!

You can use this demo to send a function definition to BigCode's open-source SantaCoder model and get back an auto-completed function.
") gr.Markdown("The model is deployed within a highly-isolated Trusted Execution Environment, meaning that we, as the service provider, have no access to the data sent to this model!
") with gr.Column(): prompt = gr.Textbox(lines=2, placeholder="Enter function definition here e.g. def add_together(x,y):") with gr.Column(): trigger = gr.Button("Test query") with gr.Column(): output = gr.Textbox(placeholder="Output", label="See the output of your query here") trigger.click(fn=run_query, inputs=[prompt], outputs=output) if __name__ == "__main__": demo.launch()