import copy
import io
import os
import random
import subprocess
import threading
import time
from glob import glob
from pathlib import Path

import gradio as gr
import matplotlib.pyplot as plt
import numpy
import requests
from PIL import Image
from scipy import stats
from server import *
from tqdm import tqdm
from utils import *

from concrete.ml.deployment import FHEModelClient

CURRENT_DIR = Path(__file__).parent

# Launch the FastAPI server (server.py) in the background and give it a few seconds to start
subprocess.Popen(["uvicorn", "server:app"], cwd=CURRENT_DIR)
time.sleep(3)

numpy.set_printoptions(threshold=numpy.inf)

USER_ID = numpy.random.randint(0, 2**32)

# Define client-specific directories
CLIENT_DIR = ROOT_DIR / f"user_{USER_ID}/client"
CLIENT_KEY_SMOOTHER_MODULE_DIR = CLIENT_DIR / KEY_SMOOTHER_MODULE_DIR
CLIENT_KEY_BASE_MODULE_DIR = CLIENT_DIR / KEY_BASE_MODULE_DIR
CLIENT_ENCRYPTED_INPUT_DIR = CLIENT_DIR / ENCRYPTED_INPUT_DIR
CLIENT_ENCRYPTED_OUTPUT_DIR = CLIENT_DIR / ENCRYPTED_OUTPUT_DIR

# Define server-specific directories
SERVER_DIR = ROOT_DIR / f"user_{USER_ID}/server"
SERVER_KEY_SMOOTHER_MODULE_DIR = SERVER_DIR / KEY_SMOOTHER_MODULE_DIR
SERVER_KEY_BASE_MODULE_DIR = SERVER_DIR / KEY_BASE_MODULE_DIR
SERVER_ENCRYPTED_INPUT_DIR = SERVER_DIR / ENCRYPTED_INPUT_DIR
SERVER_ENCRYPTED_OUTPUT_DIR = SERVER_DIR / ENCRYPTED_OUTPUT_DIR

ALL_DIRECTORIES = [
    CLIENT_KEY_SMOOTHER_MODULE_DIR,
    CLIENT_KEY_BASE_MODULE_DIR,
    CLIENT_ENCRYPTED_INPUT_DIR,
    CLIENT_ENCRYPTED_OUTPUT_DIR,
    SERVER_KEY_SMOOTHER_MODULE_DIR,
    SERVER_KEY_BASE_MODULE_DIR,
    SERVER_ENCRYPTED_INPUT_DIR,
    SERVER_ENCRYPTED_OUTPUT_DIR,
]

# Load test dataset
print("Load data ...")
os.makedirs("data", exist_ok=True)
UNIQUE_FOUNDERS = load_pickle_from_zip("data/unique_founders.pkl")
OTHER_TEST_FOUNDERS = load_pickle_from_zip("data/hf_test_founders.pkl")
OTHER_TRAIN_FOUNDERS = load_pickle_from_zip("data/hf_train_founders.pkl")
UNIQUE_MIXED_FOUNDERS = load_pickle_from_zip("data/unique_mixed_founders.pkl")

DESCENDANT_PATH = Path("./data/Child.pkl")
PREDICTION_IMG_PATH = Path("output.png")
FAMILY_TREE_IMG_PATH = Path("simulated_family_tree.png")

ALL_GENERATED_PATHS = [
    DESCENDANT_PATH,
    FAMILY_TREE_IMG_PATH,
    PREDICTION_IMG_PATH,
    FHE_COMPUTATION_TIMELINE,
]


def reset():
    """Reset the environment.

    Clean the root directory, recreating necessary directories and removing any generated files.
    """
    print("Cleaning ...")
    clean_dir(ROOT_DIR)

    for directory in ALL_DIRECTORIES:
        directory.mkdir(parents=True, exist_ok=True)

    for file_path in ALL_GENERATED_PATHS:
        if file_path.exists():
            file_path.unlink()
            print(f"File: {file_path} has been removed.")
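
# The demo implements Concrete ML's client/server deployment flow:
#   Step 1: key generation on the client (key_gen_fn)
#   Step 2: encryption of the simulated allele on the client (encrypt_fn)
#   Step 3: send the evaluation keys and encrypted windows to the server (send_input_fn)
#   Step 4: FHE execution on the server (run_fhe_fn, backed by server.py)
#   Step 5: retrieval of the encrypted result (get_output_fn)
#   Step 6: decryption and display on the client (decrypt_fn)
#
# A minimal sketch of the client-side API used throughout this file (illustrative paths,
# single model; the real app instantiates one client per window):
#
#   client = FHEModelClient(path_dir="deployment/model_0", key_dir="keys/")
#   client.generate_private_and_evaluation_keys()
#   evaluation_keys = client.get_serialized_evaluation_keys()  # shared with the server
#   encrypted_x = client.quantize_encrypt_serialize(x)         # shared with the server
#   # ... the server runs the model homomorphically and returns an encrypted result ...
#   y = client.deserialize_decrypt_dequantize(encrypted_result)
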

def simulate_allele_fn():
    yield {
        simulate_btn: gr.update(visible=True, value="🔄 Processing... Please wait."),
        ethnicity_simulation_img: gr.update(visible=False),
        simulate_text: gr.update(visible=False),
    }

    start_time = time.time()

    n_generations = random.randint(1, 3)
    individuals = random.sample(UNIQUE_MIXED_FOUNDERS, 2 + n_generations)
    num_snps = META["C"]

    print(f"Simulating family tree with: {n_generations} generations ...")

    first_founder = individuals.pop(0)
    second_founder = individuals.pop(0)

    assert len(first_founder) == 4
    assert len(second_founder) == 4
    assert (
        sum([numpy.array_equal(arr1, arr2) for arr1, arr2 in zip(first_founder, second_founder)])
        == 0
    )

    lineages = []
    labels = []
    admix = []

    for gen in range(n_generations + 1):
        print(f"Generation_{gen}:")

        # Initialize the child for this generation
        if gen == 0:
            # Use the specified founders for the first generation
            founder_1 = first_founder
            founder_2 = second_founder
            labels.append(["Ancestor 1", "Ancestor 2", "Progeny (Generation 1)"])
        else:
            # Use the last child from the previous generation as founder 1
            founder_1 = admix[-1]
            founder_2 = individuals.pop(0)
            labels.append(
                [labels[-1][-1], f"Ancestor {len(labels) + 2}", f"Progeny (Generation {gen + 1})"]
            )

        assert len(founder_1) == 4
        assert len(founder_2) == 4

        # Prepare new admix entry for this generation's child
        admix.append([None, None, None, None])

        snp_1, snp_2, label_1, label_2 = copy.deepcopy(founder_1)
        _snp_1, _snp_2, _label_1, _label_2 = copy.deepcopy(founder_2)

        lineage = []

        for j in range(2):  # Two haplotypes
            # Select one haplotype from each founder
            snp, label = (snp_1, label_1) if random.random() < 0.5 else (snp_2, label_2)
            _snp, _label = (_snp_1, _label_1) if random.random() < 0.5 else (_snp_2, _label_2)

            lineage.append(
                [compute_distribution(label.flatten()), compute_distribution(_label.flatten())]
            )

            # Recombination: swap founder segments between randomly drawn breakpoints
            breakpoints = numpy.random.choice(
                range(1, num_snps),
                # size=int(sum(numpy.random.poisson(0.75, size=gen))) + 1,
                size=int(sum(numpy.random.poisson(0.1, size=gen))) + 1,  # Fewer breakpoints, less mixed
                replace=False,
            )
            breakpoints = numpy.concatenate(([0], numpy.sort(breakpoints), [num_snps]))

            for k in range(len(breakpoints) - 1):
                snp[breakpoints[k] : breakpoints[k + 1]] = _snp[breakpoints[k] : breakpoints[k + 1]].copy()
                label[breakpoints[k] : breakpoints[k + 1]] = _label[breakpoints[k] : breakpoints[k + 1]].copy()

            yield {
                simulate_btn: gr.update(
                    visible=True,
                    value=f"⏳ Time elapsed: {time.time() - start_time:.0f} seconds ({(gen + 1)/(n_generations + 1):.0%})",
                )
            }

            admix[-1][j], admix[-1][j + 2] = snp, label

        lineages.append([(lineage[0][0] + lineage[1][0]) / 2, (lineage[0][1] + lineage[1][1]) / 2])

        print(f"Ascendant_1: {lineages[-1][0]} + Ascendant_2 {lineages[-1][1]}")

    last_child = admix[-1]
    snp_1, snp_2, label_1, label_2 = last_child[0], last_child[1], last_child[2], last_child[3]
    snp, label_full = (snp_1, label_1) if random.random() < 0.5 else (snp_2, label_2)

    # We are predicting on one allele, so we plot the corresponding allele
    lineages[-1][-1] = compute_distribution(label_full)

    l1 = label_full.reshape(1, -1)
    N, L = l1.shape
    y = l1[:, 0 : L // META["M"] * META["M"]].reshape(N, L // META["M"], META["M"])
    y = stats.mode(y, axis=2)[0].squeeze()

    write_pickle(path="./data/Child.pkl", data=[snp.reshape(1, -1), y])

    snp = numpy.array(snp).reshape(1, -1)

    yield {
        simulate_btn: gr.update(
            visible=True,
            value=f"⏳ Time elapsed: {time.time() - start_time:.0f} seconds ({(gen + 1)/(n_generations + 1):.0%})",
        )
    }

    print("Plot the simulated allele.")
    print(f"{snp.shape=} - {y.shape=}")
    print(
        f"{any(snp.flatten()[12343 : 12343 + 1000] == 1)=} - "
        f"{any(snp.flatten()[12343 : 12343 + 1000] == 0)=}"
    )

    yield {
        simulate_btn: gr.update(
            visible=True,
            value=f"⏳ Time elapsed: {time.time() - start_time:.0f} seconds ({(gen + 1)/(n_generations + 1):.0%})",
        )
    }

    _ = pie_ethnicity_simulation_plot_img(copy.copy(lineages), copy.copy(labels))

    sorted_indices = numpy.argsort(lineages[-1][0])[::-1][:2]
    top_percentages = [lineages[-1][0][i] for i in sorted_indices]
    top_labels = [LABELS[i] for i in sorted_indices]

    yield {
        simulate_btn: gr.update(
            visible=True,
            value=f"⏳ Time elapsed: {time.time() - start_time:.0f} seconds ({(gen + 1)/(n_generations + 1):.0%})",
        )
    }

    yield {
        clear_input_box: gr.update(visible=True, value=list(snp.flatten())[:321]),
        simulate_btn: gr.update(value="Data simulated ✅"),
        ethnicity_simulation_img: gr.update(
            value=Image.open(FAMILY_TREE_IMG_PATH),
            visible=True,
            show_label=False,
            show_download_button=True,
            container=True,
        ),
        simulate_text: gr.update(
            value=(
                "Given the genetic lineage simulation above, the origins of the two predominant "
                f"genes for the last progeny are: {top_labels[-2]} and {top_labels[-1]}. "
                "Now, we proceed with ***DNA testing*** using **FHE** on this final descendant."
            ),
            visible=True,
        ),
    }
    return


def key_gen_fn(user_id):
    """Generate keys for a given user on the Client Side."""
    print("\n------------ Step 1: Key Generation:")

    yield gr.update(visible=True, value="🔄 Processing... Please wait.")

    print(f"Your user ID is: {user_id:.0f}....")

    # Generate one key for all models since they share the same crypto params
    # Stage1: Base modules
    base_modules_path = sorted(glob(f"{SHARED_BASE_MODULE_DIR}/model_*"), key=extract_model_number)
    print(f"{len(base_modules_path)=} {META['NW']=}")

    if len(base_modules_path) != META["NW"]:
        yield gr.update(visible=True, value="❌ Error in key generation", interactive=False)

    base_client = FHEModelClient(path_dir=base_modules_path[0], key_dir=CLIENT_KEY_BASE_MODULE_DIR)
    base_client.generate_private_and_evaluation_keys()
    serialized_evaluation_base_modules_keys = base_client.get_serialized_evaluation_keys()
    assert isinstance(serialized_evaluation_base_modules_keys, bytes)

    print(f"Stage1: {len(glob(f'{CLIENT_KEY_BASE_MODULE_DIR}/eval_key'))} key has been generated")

    # Stage2: Smoother module
    smoother_client = FHEModelClient(
        path_dir=SHARED_SMOOTHER_MODULE_DIR, key_dir=CLIENT_KEY_SMOOTHER_MODULE_DIR
    )
    smoother_client.generate_private_and_evaluation_keys()
    serialized_evaluation_smoother_module_keys = smoother_client.get_serialized_evaluation_keys()
    assert isinstance(serialized_evaluation_smoother_module_keys, bytes)

    print(
        f"Stage2: {len(glob(f'{CLIENT_KEY_SMOOTHER_MODULE_DIR}/eval_key'))} key has been generated"
    )

    # Save the keys
    base_evaluation_key_path = Path(base_client.key_dir) / "eval_key"
    smoother_evaluation_key_path = Path(smoother_client.key_dir) / "eval_key"

    write_bytes(base_evaluation_key_path, serialized_evaluation_base_modules_keys)
    write_bytes(smoother_evaluation_key_path, serialized_evaluation_smoother_module_keys)

    if not base_evaluation_key_path.is_file():
        msg = "❌ Error encountered while generating the base modules evaluation key"
    elif not smoother_evaluation_key_path.is_file():
        msg = "❌ Error encountered while generating the smoother module evaluation key"
    else:
        msg = "Secret and public keys have been generated ✅"

    print(msg)
    yield gr.update(visible=True, value=msg, interactive=False)
    return


def encrypt_fn(user_id):
    """Encrypt input on the Client Side using the secret key."""
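    # The allele is too long for a single FHE circuit, so it is split into META["NW"]
    # windows. Each window is quantized and encrypted with the client matching the
    # corresponding server-side base module; the last window also absorbs the remainder
    # of the sequence (see process_data_for_base_modules below).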
    print("\n------------ Step 2: Encrypt the input")

    if (
        is_none(int(user_id))
        or (len(glob(f"{CLIENT_KEY_BASE_MODULE_DIR}/*")) == 0)
        or not DESCENDANT_PATH.is_file()
    ):
        print("Error in encryption step: Provide your chromosome and generate the evaluation keys.")
        yield {
            encrypt_btn: gr.update(
                visible=True,
                value="❌ Ensure you have simulated an allele and the evaluation key has been generated.",
            )
        }
        return

    allele, _ = read_pickle(path=DESCENDANT_PATH)

    yield {
        encrypt_btn: gr.update(visible=True, value="🔄 Processing... Please wait."),
        send_btn: gr.update(interactive=False),
        run_fhe_btn: gr.update(interactive=False),
        get_output_btn: gr.update(interactive=False),
        decrypt_btn: gr.update(interactive=False),
        simulate_btn: gr.update(interactive=False),
    }

    base_modules_path = sorted(glob(f"{SHARED_BASE_MODULE_DIR}/model_*"), key=extract_model_number)
    assert len(base_modules_path) == META["NW"]
    print(f"{len(base_modules_path)} models have been loaded")

    client_fhemodels = []
    for i, base_module_path in enumerate(tqdm(base_modules_path)):
        base_client = FHEModelClient(path_dir=base_module_path, key_dir=CLIENT_KEY_BASE_MODULE_DIR)
        client_fhemodels.append(base_client)

    base_serialized_evaluation_keys = read_bytes(base_client.key_dir / "eval_key")
    assert isinstance(base_serialized_evaluation_keys, bytes)

    X_p, _, M_, rem = process_data_for_base_modules(META, allele)
    base_args = tuple(zip(client_fhemodels[:-1], numpy.swapaxes(X_p, 0, 1)))
    base_args += ((client_fhemodels[-1], allele[:, allele.shape[1] - (M_ + rem) :]),)

    start_time = time.time()
    for i, (client, window) in enumerate(base_args):
        encrypted_input = client.quantize_encrypt_serialize(window)
        write_bytes(CLIENT_ENCRYPTED_INPUT_DIR / f"window_{i}", encrypted_input)
        yield {
            encrypt_btn: gr.update(
                visible=True,
                value=f"⏳ Time elapsed: {time.time() - start_time:.0f} seconds ({i/META['NW']:.0%}).",
            )
        }

    exec_time = time.time() - start_time
    msg = f"Encryption completed in {exec_time:.2f} seconds."
    print(msg)

    enc_quant_input_shorten_hex = encrypted_input.hex()[:INPUT_BROWSER_LIMIT]

    yield {
        encrypt_input_box: gr.update(visible=True, value=enc_quant_input_shorten_hex),
        encrypt_btn: gr.update(interactive=False, value=msg),
        simulate_btn: gr.update(interactive=False),
        send_btn: gr.update(interactive=True),
        run_fhe_btn: gr.update(interactive=True),
        get_output_btn: gr.update(interactive=True),
        decrypt_btn: gr.update(interactive=True),
    }
    return
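
# What travels to the server in Step 3: the two serialized evaluation keys (base modules
# and smoother module) plus the META["NW"] encrypted windows written by encrypt_fn.
# Only public material leaves the client; the secret key never does.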
def send_input_fn(user_id):
    """Send the encrypted data and the evaluation keys to the server."""
    print("\n------------ Step 3.1: Send encrypted_data to the Server")

    errors = []
    if not (CLIENT_KEY_BASE_MODULE_DIR / "eval_key").is_file():
        errors.append("Stage 1 evaluation keys are missing.")
    if not (CLIENT_KEY_SMOOTHER_MODULE_DIR / "eval_key").is_file():
        errors.append("Stage 2 evaluation keys are missing.")
    if len(glob(str(CLIENT_ENCRYPTED_INPUT_DIR / "window_*"))) != META["NW"]:
        errors.append("The input has not been successfully encrypted.")

    if errors:
        error_message = "❌ Error during data transmission:\n" + "\n".join(errors)
        print(error_message)
        yield {send_btn: gr.update(value=error_message)}
        return

    yield {
        send_btn: gr.update(value="🔄 Processing... Please wait."),
        run_fhe_btn: gr.update(interactive=False),
        get_output_btn: gr.update(interactive=False),
        decrypt_btn: gr.update(interactive=False),
    }

    # Define the data and files to post
    data = {"user_id": f"user_{user_id:.0f}", "root_dir": str(ROOT_DIR)}

    n_w = glob(f"{CLIENT_ENCRYPTED_INPUT_DIR}/window_*")
    files = [
        ("files", open(f"{CLIENT_KEY_BASE_MODULE_DIR}/eval_key", "rb")),
        ("files", open(f"{CLIENT_KEY_SMOOTHER_MODULE_DIR}/eval_key", "rb")),
    ] + [("files", open(f"{CLIENT_ENCRYPTED_INPUT_DIR}/window_{i}", "rb")) for i in range(len(n_w))]

    # Send the encrypted input and evaluation keys to the server
    url = SERVER_URL + "send_input"
    print(f"{url=}")

    with requests.post(url=url, data=data, files=files) as resp:
        print(f"{resp.ok=}")
        msg = "Data sent to the Server ✅" if resp.ok else "❌ Error in sending data to the Server"

    yield {
        send_btn: gr.update(value=msg, interactive=False if "✅" in msg else True),
        run_fhe_btn: gr.update(interactive=True),
        get_output_btn: gr.update(interactive=True),
        decrypt_btn: gr.update(interactive=True),
    }
    return


def run_fhe_fn(user_id):
    """Run the FHE execution on the Server Side."""
    print("\n------------ Step 4.1: Run in FHE on the Server Side")

    if FHE_COMPUTATION_TIMELINE.exists():
        FHE_COMPUTATION_TIMELINE.unlink()
        print(f"File {FHE_COMPUTATION_TIMELINE} removed successfully.")

    if is_none(int(user_id)) or len(glob(f"{SERVER_ENCRYPTED_INPUT_DIR}/encrypted_window_*")) == 0:
        yield {
            run_fhe_btn: gr.update(
                visible=True,
                value="❌ Check your connectivity. Ensure the input has been submitted, the keys have been generated, and the server has received the data.",
            )
        }
        return

    yield {
        run_fhe_btn: gr.update(
            visible=True, value="🔄 Processing... Please wait. This may take up to 500 seconds."
        ),
        get_output_btn: gr.update(interactive=False),
        decrypt_btn: gr.update(interactive=False),
    }

    data = {
        "user_id": f"user_{user_id:.0f}",
        "root_dir": str(ROOT_DIR),
    }

    url = SERVER_URL + "run_fhe"

    # Function to run FHE on the server in a separate thread
    def run_fhe_on_server():
        nonlocal server_response
        with requests.post(url=url, data=data) as resp:
            if not resp.ok:
                server_response = "error"
            else:
                server_response = resp.json()

    server_response = None

    # Start the FHE process in a separate thread
    server_thread = threading.Thread(target=run_fhe_on_server)
    server_thread.start()

    # While the server is processing, check the timing file for updates
    while server_thread.is_alive():
        try:
            with FHE_COMPUTATION_TIMELINE.open("r", encoding="utf-8") as f:
                timing = f.read().strip()
            yield {
                run_fhe_btn: gr.update(visible=True, value=f"⏳ Time elapsed: {timing}"),
            }
        except FileNotFoundError:
            yield {
                run_fhe_btn: gr.update(visible=True, value="⏳ Waiting for the server to start..."),
            }
        time.sleep(5)  # Wait a few seconds before reading again

    # Wait for the thread to finish
    server_thread.join()

    # Handle server response after completion
    if server_response == "error":
        yield {
            run_fhe_btn: gr.update(
                visible=True,
                value="❌ Error occurred on the Server Side. Please check your connectivity.",
            ),
        }
    else:
        final_time = server_response
        yield {
            run_fhe_btn: gr.update(
                visible=True, interactive=False, value=f"FHE executed in {final_time:.2f} seconds"
            ),
            get_output_btn: gr.update(interactive=True),
            decrypt_btn: gr.update(interactive=True),
        }


def get_output_fn(user_id):
    """Retrieve the encrypted data from the server."""
    print("\n------------ Step 5.1: Get output")

    if is_none(int(user_id)) or len(glob(f"{SERVER_ENCRYPTED_INPUT_DIR}/encrypted_window_*")) == 0:
        msg = "❌ Error during data transmission: The server did not receive the data, so the FHE process could not be performed."
        print(msg)
        yield {get_output_btn: gr.update(visible=True, value=msg)}
        return

    yield {
        get_output_btn: gr.update(value="🔄 Processing... Please wait."),
        decrypt_btn: gr.update(interactive=False),
    }

    data = {
        "user_id": f"user_{user_id:.0f}",
        "root_dir": str(ROOT_DIR),
    }

    # Retrieve the encrypted output
    url = SERVER_URL + "get_output"
    print(f"{url=}")

    with requests.post(url=url, data=data) as response:
        msg = (
            "Data sent to the Client ✅"
            if response.ok
            else "❌ Error in receiving data from the server"
        )

    yhat_encrypted = load_pickle(CLIENT_ENCRYPTED_OUTPUT_DIR / "encrypted_final_output.pkl")
    assert len(yhat_encrypted) == META["NW"]

    yield {
        get_output_btn: gr.update(value=msg, interactive=False if "✅" in msg else True),
        decrypt_btn: gr.update(interactive=True),
    }
    return
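
# The server returns one encrypted output per window. decrypt_fn below decrypts each of
# them with the client's secret key, takes the argmax over the class scores to get a
# per-window ancestry label, and aggregates those labels into the final distribution
# rendered as a pie chart.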
def decrypt_fn(user_id):
    """Decrypt the data on the Client Side."""
    print("\n------------ Step 6: Decrypt output")

    if (
        is_none(int(user_id))
        or not (CLIENT_ENCRYPTED_OUTPUT_DIR / "encrypted_final_output.pkl").is_file()
    ):
        print("Error in decryption step: Please run the FHE execution first.")
        yield {
            decrypt_btn: gr.update(
                visible=True,
                value="❌ Ensure the input is processed and retrieved from the server",
            ),
        }
        return

    yield {decrypt_btn: gr.update(visible=True, value="🔄 Processing... Please wait.")}

    yhat_encrypted = load_pickle(CLIENT_ENCRYPTED_OUTPUT_DIR / "encrypted_final_output.pkl")

    # Retrieve the client API
    client = FHEModelClient(path_dir=SHARED_SMOOTHER_MODULE_DIR, key_dir=SHARED_SMOOTHER_MODULE_DIR)
    client.load()

    yhat = []
    for encrypted_i in yhat_encrypted:
        # Deserialize, decrypt and post-process the encrypted output
        output = client.deserialize_decrypt_dequantize(encrypted_i)
        y_pred = numpy.argmax(output, axis=-1)[0]
        yhat.append(y_pred)

    yhat = numpy.array(yhat)

    proportion = compute_distribution(yhat.flatten())

    _ = pie_output_plot_img(copy.copy(proportion))

    yield {
        decrypt_btn: gr.update(value="Output decrypted ✅", interactive=False),
        pie_plot_output: gr.update(
            value=Image.open(PREDICTION_IMG_PATH),
            visible=True,
            show_label=False,
            show_download_button=False,
            container=False,
        ),
        user_id_btn: gr.update(value=None),
    }
    return


def create_pie_chart(ax, data, title, highlight=False, largest_piece=False, simulation=True):
    sorted_indices = numpy.argsort(data)[::-1]

    if data[sorted_indices[0]] == 1:
        sorted_indices = [sorted_indices[0]]

    sorted_data = data[sorted_indices]
    sorted_labels = [LABELS[i] for i in sorted_indices]
    sorted_colors = [COLORS[i] for i in sorted_indices]

    # Keep only the 2 biggest parts
    if simulation and not data[sorted_indices[0]] == 1:
        top_data = sorted_data[:2]  # First two largest proportions
        others_data = sorted_data[2:].sum()  # Sum of the rest
        sorted_data = numpy.concatenate([top_data, [others_data]])  # Include "others"
        sorted_labels = sorted_labels[:2] + ["Others"]  # First two labels + "Others"
        sorted_colors = sorted_colors[:2] + ["#D3D3D3"]  # Gray color for "Others"

    if highlight:
        explode = [0.15 for _ in range(len(sorted_data))]
    else:
        explode = [0.09 if i == 0 else 0 for i in range(len(sorted_data))]

    wedges, _, _ = ax.pie(
        sorted_data,
        labels=sorted_labels,
        colors=sorted_colors,
        autopct=lambda x: f"{round(x)}%",
        pctdistance=0.7,
        labeldistance=1.1,
        shadow=True,
        explode=explode,
        radius=1.9 if highlight else 1.0,
    )

    if largest_piece:
        # Highlight the largest wedge with a black edge
        wedges[0].set_edgecolor("black")
        wedges[0].set_linewidth(2)

    ax.set_title(title, fontsize=10, weight="bold")

    if highlight:
        for wedge in wedges:
            wedge.set_edgecolor("black")
            wedge.set_linewidth(3)
        ax.set_title(title, fontsize=14, weight="bold")

    ax.axis("equal")  # Ensure the pie chart is drawn as a circle


def pie_ethnicity_simulation_plot_img(lineages, labels):
    """Generates a pie chart for genetic lineage simulation across multiple generations."""
    n_generations = len(lineages) - 1

    fig, axes = plt.subplots(n_generations, 3, figsize=(10, 4 * n_generations))
    fig.suptitle("Genetic Lineage Simulator", fontsize=16, weight="bold", x=1)

    for gen in range(n_generations):
        parent1, parent2 = lineages.pop(0)
        descendant = lineages[0][0]
        label = labels.pop(0)

        print(f"Generation {gen}: Parent 1: {parent1} + Parent 2 {parent2} = Child {descendant}")

        ax_gen = axes[gen] if n_generations > 1 else axes

        create_pie_chart(ax_gen[0], parent1, label[0])
        create_pie_chart(ax_gen[1], parent2, label[1])

        # Check if it's the last descendant and highlight it
        is_last = gen == n_generations - 1
        create_pie_chart(
            ax_gen[2],
            descendant,
            f"Last progeny (Generation {n_generations + 1})" if is_last else label[2],
            highlight=is_last,
        )

    plt.subplots_adjust(right=2)
    plt.savefig(FAMILY_TREE_IMG_PATH, format="png", bbox_inches="tight")

    buf = io.BytesIO()
    plt.savefig(buf, format="png")
    buf.seek(0)
    plt.close(fig)

    return Image.open(buf)
def pie_output_plot_img(data, simulation=False):
    """Generates a pie chart based on the ethnic proportions."""
    fig, ax = plt.subplots(figsize=(10, 8))

    create_pie_chart(
        ax,
        data,
        "Predicted ethnicity distribution using FHE",
        highlight=True,
        largest_piece=True,
        simulation=simulation,
    )

    plt.savefig(PREDICTION_IMG_PATH, format="png", bbox_inches="tight")

    buf = io.BytesIO()
    plt.savefig(buf, format="png")
    buf.seek(0)
    plt.close(fig)  # Close the plot to free memory

    return Image.open(buf)


CSS = """
#accordion-label {
    /* Custom styling for the Accordion title */
    background-color: #f0f0f0 !important;  /* Set the background color to gray */
}
#ie_plot_output {
    /* Target the image output container */
    align-items: center;
    justify-content: center;
    margin: auto;  /* Ensure it is centered */
}
"""

if __name__ == "__main__":
    print("Starting demo ...")
    print(META)

    reset()

    with gr.Blocks(css=CSS) as demo:
        gr.Markdown()
        gr.Markdown(
            """
Concrete-ML • Documentation • Community • @zama_fhe
DNA testing platforms analyze your genetic data in the clear, leaving it vulnerable to hacks. With Fully Homomorphic Encryption (FHE), they could run the same analysis on encrypted data, so your sensitive information stays protected even while it is being processed: you get the insights without the risks.

In this demo, we show you how to perform encrypted DNA analysis using FHE and Zama's Concrete ML library.
            """
        )

        gr.Markdown()
        gr.Markdown(
            """