fn neural_net() {
let seed = 694201337;
let mut rng = StdRng::seed_from_u64(seed);
log_nn_table();
let input = Array3::random_using((120, 80, 3), Uniform::<f32>::new(-5., 5.), &mut rng);
let mut neural_net = NeuralNetwork::new();
let kernel = Array4::random_using((32, 5, 5, 3), Uniform::<f32>::new(-10., 10.), &mut rng);
neural_net.add_layer(Box::new(Convolution::new(kernel, vec![120, 80, 3])));
neural_net.add_layer(Box::new(MaxPool::new(2, vec![116, 76, 32])));
neural_net.add_layer(Box::new(Relu::new(vec![58, 38, 32])));
let kernel = Array4::random_using((32, 5, 5, 32), Uniform::<f32>::new(-10., 10.), &mut rng);
neural_net.add_layer(Box::new(Convolution::new(kernel, vec![58, 38, 32])));
neural_net.add_layer(Box::new(MaxPool::new(2, vec![54, 34, 32])));
neural_net.add_layer(Box::new(Relu::new(vec![27, 17, 32])));
neural_net.add_layer(Box::new(Flatten::new(vec![27, 17, 32])));
let weights =
Array2::random_using((1000, 14688), Uniform::<f32>::new(-10.0, 10.0), &mut rng);
let biases = Array1::random_using(1000, Uniform::<f32>::new(-10.0, 10.0), &mut rng);
neural_net.add_layer(Box::new(FullyConnected::new(weights, biases)));
neural_net.add_layer(Box::new(Relu::new(vec![1000])));
let weights = Array2::random_using((5, 1000), Uniform::<f32>::new(-10.0, 10.0), &mut rng);
let biases = Array1::random_using(5, Uniform::<f32>::new(-10.0, 10.0), &mut rng);
neural_net.add_layer(Box::new(FullyConnected::new(weights, biases)));
neural_net.add_layer(Box::new(Normalize::new(vec![5])));
let output = neural_net.apply(&input.into_dyn().view(), 3);
if output.is_some() {
println!("final output (normalized):\n{}", output.unwrap());
} else {
print!("Unsupported dimensionality of input |
Array");
}
}

#[bench]
fn bench_neural_net(b: &mut Bencher) {
log_nn_table();
let input_json =
fs::read_to_string("./src/json/initial.json").expect("Unable to read file");
let input = serde_json::from_str::<ArcArray<f32, Ix3>>(&input_json).unwrap();
let neural_net = deserialize_model_json("./src/json/model.json");
b.iter(|| neural_net.apply(&input.clone().into_dyn().view(), 3));
}
}
use std::{fs, io::Write};
use crate::layers::{NNJson, NeuralNetwork};
pub fn deserialize_model_json(path: &str) -> NeuralNetwork {
let model_data = fs::read_to_string(path).expect("Unable to read file");
let model_json = serde_json::from_str::<NNJson>(&model_data).unwrap();
let neural_net: NeuralNetwork = model_json.try_into().unwrap();
neural_net
}
pub fn serialize_model_json(path: &str, model: NeuralNetwork) {
let mut file = fs::File::create(path).expect("Error encountered while creating file!");
let model_json: NNJson = model.into();
file.write_all(serde_json::to_string(&model_json).unwrap().as_bytes())
.expect("Unable to write data");
}
#[cfg(test)]
pub mod tests {
use ndarray::{ArcArray, Ix3};
use serde_json;
use std::fs;
extern crate test;
use test::Bencher;
use super::*;
use crate::nn::{create_neural_net, log_nn_table};

#[test]
fn serialize_nn_json() {
serialize_model_json("./src/json/nn.json", create_neural_net())
}

#[test]
fn deserialize_nn_json() {
log_nn_table();
let input_json =
fs::read_to_string("./src/json/initial.json").expect("Unable to read file");
let input = serde_json::from_str::<ArcArray<f32, Ix3>>(&input_json).unwrap();
let neural_net = deserialize_model_json("./src/json/model.json");
let output = neural_net.apply(&input.into_dyn().view(), 3);
if output.is_some() {
println!("final output (normalized):\n{}", output.unwrap());
} else {
print!("Unsupported dimensionality of input Array");
}
}

#[test]
fn serde_full_circle() {
serialize_model_json("./src/json/nn.json", create_neural_net());
log_nn_table();
let input_json =
fs::read_to_string("./src/json/initial.json").expect("Unable to read file");
let input = serde_json::from_str::<ArcArray<f32, Ix3>>(&input_json).unwrap();
let neural_net = deserialize_model_json("./src/json/nn.json");
let output = neural_net.apply(&input.into_dyn().view(), 3);
if output.is_some() {
println!("final output (normalized):\n{}", output.unwrap());
} else {
print!("Unsupported dimensionality of input Array");
}
}

#[bench]
fn bench_deserialize_neural_net(b: &mut Bencher) {
b.iter(deserialize_nn_json);
}

#[bench]
fn bench_serialize_neural_net(b: &mut Bencher) {
b.iter(serialize_nn_json);
}
}
from __future__ import annotations

import typing
import os
from os import path
import json
from dataclasses import dataclass
import re

import numpy as np
class SafeDict(dict):
def __missing__(self, key):
return '{' + key + '}'
circom_template_string = '''pragma circom 2.0.0;
{include}
template Model() {brace_left}
{signal}
{component}
{main}
{brace_right}
component main = Model();
'''
templates: typing.Dict[str, Template] = {
}
def parse_shape(shape: typing.List[int]) -> str:
'''parse shape to integers enclosed by []'''
shape_str = ''
for dim in shape:
shape_str += '[{}]'.format(dim)
return shape_str
def parse_index(shape: typing.List[int]) -> str:
'''parse shape to indices enclosed by []'''
index_str = ''
for i in range(len(shape)):
index_str += '[i{}]'.format(i)
return index_str
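# A minimal sketch of what the two helpers above produce (the shape used here
# is an arbitrary example, e.g. a 28x28x1 image tensor):
#
#   parse_shape([28, 28, 1])  ->  '[28][28][1]'
#   parse_index([28, 28, 1])  ->  '[i0][i1][i2]'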
@dataclass
class Template:
op_name: str
fpath: str
    args: typing.List[str]
input_names: typing.List[str] = None
input_dims: typing.List[int] = None
output_names: typing.List[str] = None
output_dims: typing.List[int] = None
def __str__(self) -> str:
args_str = ', '.join(self.args)
args_str = '(' + args_str + ')'
return '{:>20}{:30} {}{}{}{} \t<-- {}'.format(
self.op_name, args_str,
self.input_names, self.input_dims,
self.output_names, self.output_dims,
self.fpath)
def file_parse(fpath):
'''parse circom file and register templates'''
with open(fpath, 'r') as f:
lines = f.read().split('\n')
    lines = [l for l in lines if not l.strip().startswith('//')]
lines = ' '.join(lines)
lines = re.sub('/\*.*?\*/', 'IGN', lines)
funcs = re.findall('template (\w+) ?\((.*?)\) ?\{(.*?)\}', lines)
for func in funcs:
op_name = func[0].strip()
args = func[1].split(',')
main = func[2].strip()
assert op_name not in templates, \
'duplicated template: {} in {} vs. {}'.format(
op_name, templates[op_name].fpath, fpath)
signals = re.findall('signal (\w+) (\w+)(.*?);', main)
infos = [[] for i in range(4)]
        for sig in signals:
sig_types = ['input', 'output']
assert sig[0] in sig_types, sig[1] + ' | ' + main
idx = sig_types.index(sig[0])
infos[idx*2+0].append(sig[1])
sig_dim = sig[2].count('[')
infos[idx*2+1].append(sig_dim)
templates[op_name] = Template(
op_name, fpath,
[a.strip() for a in args],
*infos)
def dir_parse(dir_path, skips=[]):
'''parse circom files in a directory'''
names = os.listdir(dir_path)
for name in names:
if name in skips:
continue
fpath = path.join(dir_path, name)
if os.path.isdir(fpath):
dir_parse(fpath)
elif os.path.isfile(fpath):
if fpath.endswith('.circom'):
file_parse(fpath)
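# Sketch of how the parser above is typically driven; the directory path and
# skip list mirror the ones used in main.py and are assumptions here:
#
#   dir_parse('node_modules/circomlib-ml/circuits/',
#             skips=['util.circom', 'circomlib-matrix', 'circomlib', 'crypto'])
#   for tmpl in templates.values():
#       print(tmpl)   # op name, args, input/output signal names and dims, file path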
@dataclass
class Signal:
name: str
shape: typing.List[int]
value: typing.Any = None
def inject_signal(self, comp_name: str) -> str:
'''inject signal into the beginning of the circuit'''
if self.value is not None or self.name == 'out' or self.name == 'remainder':
return 'signal input {}_{}{};\n'.format(
comp_name, self.name, parse_shape(self.shape))
return ''
def inject_main(self, comp_name: str, prev_comp_name: str = None, prev_signal: Signal = None) -> str:
'''inject signal into main'''
inject_str = ''
if self.value is not None or self.name == 'out' or self.name == 'remainder':
if comp_name.endswith('softmax') and self.name == 'out':
inject_str += '{}.out <== {}_out[0];\n'.format(
comp_name, comp_name)
return inject_str
for i in range(len(self.shape)):
inject_str += '{}for (var i{} = 0; i{} < {}; i{}++) {{\n'.format(
' '*i*4, i, i, self.shape[i], i)
if 'activation' in comp_name or 're_lu' in comp_name:
                inject_str += '{}{}{}.{} <== {}_{}{};\n'.format(' '*(i+1)*4,
comp_name, parse_index(self.shape), self.name,
comp_name, self.name, parse_index(self.shape))
else:
inject_str += '{}{}.{}{} <== {}_{}{};\n'.format(' '*(i+1)*4,
comp_name, self.name, parse_index(self.shape),
comp_name, self.name, parse_index(self.shape))
inject_str += '}'*len(self.shape)+'\n'
return inject_str
if self.shape != prev_signal.shape:
raise ValueError('shape mismatch: {} vs. {}'.format(self.shape, prev_signal.shape))
for i in range(len(self.shape)):
inject_str += '{}for (var i{} = 0; i{} < {}; i{}++) {{\n'.format(
' '*i*4, i, i, self.shape[i], i)
if 'activation' in comp_name or 're_lu' in comp_name:
inject_str += '{}{}{}.{} <== {}.{}{};\n'.format(' '*(i+1)*4,
comp_name, parse_index(self.shape), self.name,
prev_comp_name, prev_signal.name, parse_index(self.shape))
elif 'activation' in prev_comp_name or 're_lu' in prev_comp_name:
inject_str += '{}{}.{}{} <== {}{}.{};\n'.format(' '*(i+1)*4,
comp_name, self.name, parse_index(self.shape),
prev_comp_name, parse_index(self.shape), prev_signal.name)
else:
inject_str += '{}{}.{}{} <== {}.{}{};\n'.format(' '*(i+1)*4,
comp_name, self.name, parse_index(self.shape),
prev_comp_name, prev_signal.name, parse_index(self.shape))
inject_str += '}'*len(self.shape)+'\n'
return inject_str
def inject_input_signal(self) -> str:
'''inject the circuit input signal'''
if self.value is not None:
raise ValueError('input signal should not have value')
return 'signal input in{};\n'.format(parse_shape(self.shape))
    def inject_output_signal(self) -> str:
'''inject the circuit output signal'''
if self.value is not None:
raise ValueError('output signal should not have value')
return 'signal output out{};\n'.format(parse_shape(self.shape))
def inject_input_main(self, comp_name: str) -> str:
'''inject the circuit input signal into main'''
if self.value is not None:
raise ValueError('input signal should not have value')
inject_str = ''
for i in range(len(self.shape)):
inject_str += '{}for (var i{} = 0; i{} < {}; i{}++) {{\n'.format(
' '*i*4, i, i, self.shape[i], i)
inject_str += '{}{}.{}{} <== in{};\n'.format(' '*(i+1)*4,
comp_name, self.name, parse_index(self.shape),
parse_index(self.shape))
inject_str += '}'*len(self.shape)+'\n'
return inject_str
def inject_output_main(self, prev_comp_name: str, prev_signal: Signal) -> str:
'''inject the circuit output signal into main'''
if self.value is not None:
raise ValueError('output signal should not have value')
if self.shape != prev_signal.shape:
raise ValueError('shape mismatch: {} vs. {}'.format(self.shape, prev_signal.shape))
if 'softmax' in prev_comp_name:
return 'out[0] <== {}.out;\n'.format(prev_comp_name)
inject_str = ''
for i in range(len(self.shape)):
inject_str += '{}for (var i{} = 0; i{} < {}; i{}++) {{\n'.format(
' '*i*4, i, i, self.shape[i], i)
if 're_lu' in prev_comp_name:
inject_str += '{}out{} <== {}{}.{};\n'.format(' '*(i+1)*4,
parse_index(self.shape),
prev_comp_name, parse_index(self.shape), prev_signal.name)
else:
inject_str += '{}out{} <== {}.{}{};\n'.format(' '*(i+1)*4,
parse_index(self.shape),
                    prev_comp_name, prev_signal.name, parse_index(self.shape))
inject_str += '}'*len(self.shape)+'\n'
return inject_str
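# Example of the loop code Signal.inject_input_main generates (a sketch; the
# component name 'conv2d' and the 2x2 shape are made up):
#
#   Signal('in', [2, 2]).inject_input_main('conv2d') returns:
#       for (var i0 = 0; i0 < 2; i0++) {
#           for (var i1 = 0; i1 < 2; i1++) {
#               conv2d.in[i0][i1] <== in[i0][i1];
#       }}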
@dataclass
class Component:
name: str
template: Template
inputs: typing.List[Signal]
outputs: typing.List[Signal]
args: typing.Dict[str, typing.Any] = None
def inject_include(self) -> str:
        '''include the component template'''
        return 'include "../{}";\n'.format(self.template.fpath)
def inject_signal(self, prev_comp: Component = None, last_comp: bool = False) -> str:
'''inject the component signals'''
inject_str = ''
for signal in self.inputs:
if signal.name == 'out' or signal.name == 'remainder':
inject_str += signal.inject_signal(self.name)
if last_comp is True and signal.name == 'out':
inject_str += signal.inject_output_signal()
elif signal.value is None and prev_comp is None:
inject_str += signal.inject_input_signal()
elif signal.value is not None:
inject_str += signal.inject_signal(self.name)
return inject_str
def inject_component(self) -> str:
'''inject the component declaration'''
if self.template.op_name == 'ReLU':
for signal in self.inputs:
if signal.name == 'out':
output_signal = signal
break
inject_str = 'component {}{};\n'.format(self.name, parse_shape(output_signal.shape))
for i in range(len(output_signal.shape)):
inject_str += '{}for (var i{} = 0; i{} < {}; i{}++) {{\n'.format(
' '*i*4, i, i, output_signal.shape[i], i)
inject_str += '{}{}{} = ReLU();\n'.format(' '*(i+1)*4,
self.name, parse_index(output_signal.shape))
inject_str += '}'*len(output_signal.shape)+'\n'
return inject_str
return 'component {} = {}({});\n'.format(
self.name, self.template.op_name, self.parse_args(self.template.args, self.args))
def inject_main(self, prev_comp: Component = None, last_comp: bool = False) -> str:
'''inject the component main'''
inject_str = ''
for signal in self.inputs:
if signal.value is not None or signal.name == 'out' or signal.name == 'remainder':
                inject_str += signal.inject_main(self.name)
elif prev_comp is None:
inject_str += signal.inject_input_main(self.name)
else:
for sig in prev_comp.inputs:
if sig.name == 'out':
output_signal = sig
break
if output_signal is None:
output_signal = prev_comp.outputs[0]
inject_str += signal.inject_main(self.name, prev_comp.name, output_signal)
if last_comp:
for signal in self.inputs:
if signal.name == 'out':
inject_str += signal.inject_output_main(self.name, signal)
break
for signal in self.outputs:
inject_str += signal.inject_output_main(self.name, signal)
return inject_str
def to_json(self, dec: int) -> typing.Dict[str, typing.Any]:
'''convert the component params to json format'''
json_dict = {}
for signal in self.inputs:
if signal.value is not None:
if signal.name == 'bias' or signal.name == 'b':
scaling = float(10**(2*dec))
else:
scaling = float(10**dec)
value = [str(int(v*scaling)) for v in signal.value.flatten().tolist()]
if len(signal.shape) > 1:
value = np.array(value).reshape(signal.shape).tolist()
json_dict.update({f'{self.name}_{signal.name}': value})
return json_dict
@staticmethod
def parse_args(template_args: typing.List[str], args: typing.Dict[str, typing.Any]) -> str:
'''parse the args to a format string, ready to be injected'''
args_str = '{'+'}, {'.join(template_args)+'}'
return args_str.format(**args)
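# Sketch of parse_args: the template's argument names are filled in from the
# component's args dict (the values below are arbitrary):
#
#   Component.parse_args(['nInputs', 'nOutputs'], {'nInputs': 72, 'nOutputs': 10})
#   ->  '72, 10'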
@dataclass
class Circuit:
components: typing.List[Component]
def __init__(self):
self.components = []
def add_component(self, component: Component):
        self.components.append(component)
def add_components(self, components: typing.List[Component]):
self.components.extend(components)
def inject_include(self) -> str:
        '''inject the include statements'''
inject_str = []
for component in self.components:
inject_str.append(component.inject_include())
return ''.join(set(inject_str))
def inject_signal(self) -> str:
'''inject the signal declarations'''
inject_str = self.components[0].inject_signal()
for i in range(1, len(self.components)):
inject_str += self.components[i].inject_signal(self.components[i-1], i==len(self.components)-1)
return inject_str
def inject_component(self) -> str:
'''inject the component declarations'''
inject_str = ''
for component in self.components:
inject_str += component.inject_component()
return inject_str
def inject_main(self) -> str:
'''inject the main template'''
inject_str = self.components[0].inject_main()
for i in range(1, len(self.components)):
inject_str += self.components[i].inject_main(self.components[i-1], i==len(self.components)-1)
return inject_str
def to_circom(self) -> str:
'''convert the circuit to a circom file'''
return circom_template_string.format(**{
'include': self.inject_include(),
'brace_left': '{',
'signal': self.inject_signal(),
'component': self.inject_component(),
'main': self.inject_main(),
'brace_right': '}',
})
def to_json(self, dec: int) -> str:
'''convert the model weights to json format'''
json_dict = {}
for component in self.components:
json_dict.update(component.to_json(dec))
        return json.dumps(json_dict)
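
if __name__ == '__main__':
    # Minimal self-contained sketch, not part of the transpiler: register a
    # hypothetical Dense template by hand and show the circom snippets a
    # Component derives from it. In real use templates come from dir_parse().
    dense_tmpl = Template('Dense', 'Dense.circom', ['nInputs', 'nOutputs'])
    dense_comp = Component('dense', dense_tmpl,
                           [Signal('in', [72]), Signal('out', [10])], [],
                           {'nInputs': 72, 'nOutputs': 10})
    print(dense_comp.inject_include(), end='')    # include "../Dense.circom";
    print(dense_comp.inject_component(), end='')  # component dense = Dense(72, 10);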
# Read keras model into list of parameters like op, input, output, weight, bias
from __future__ import annotations
from dataclasses import dataclass
import typing
from tensorflow.keras.models import load_model
from tensorflow.keras.layers import Layer as KerasLayer
import numpy as np
supported_ops = [
'Activation',
'AveragePooling2D',
'BatchNormalization',
'Conv2D',
'Dense',
'Flatten',
'GlobalAveragePooling2D',
'GlobalMaxPooling2D',
'MaxPooling2D',
'ReLU',
'Softmax',
]
skip_ops = [
'Dropout',
'InputLayer',
]
# read each layer in a model and convert it to a class called Layer
@dataclass
class Layer:
''' A single layer in a Keras model. '''
op: str
name: str
input: typing.List[int]
output: typing.List[int]
config: typing.Dict[str, typing.Any]
weights: typing.List[np.ndarray]
def __init__(self, layer: KerasLayer):
self.op = layer.__class__.__name__
self.name = layer.name
self.input = layer.input_shape[1:]
self.output = layer.output_shape[1:]
self.config = layer.get_config()
self.weights = layer.get_weights()
class Model:
layers: typing.List[Layer]
def __init__(self, filename: str, raw: bool = False):
''' Load a Keras model from a file. '''
model = load_model(filename)
self.layers = [Layer(layer) for layer in model.layers if self._for_transpilation(layer.__class__.__name__)]
@staticmethod
def _for_transpilation(op: str) -> bool:
if op in skip_ops:
return False
if op in supported_ops:
return True
        raise NotImplementedError(f'Unsupported op: {op}')
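
if __name__ == '__main__':
    # Quick sketch (the .h5 path is a placeholder): load a trained Keras model
    # and list the layers the transpiler will see, with their shapes.
    m = Model('model.h5')
    for layer in m.layers:
        print(layer.op, layer.name, layer.input, layer.output)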
from .circom import Circuit, Component
python_template_string = '''""" Make an interger-only circuit of the corresponding CIRCOM circuit.
Usage:
circuit.py <circuit.json> <input.json> [-o <output>]
circuit.py (-h | --help)
Options:
-h --help Show this screen.
-o <output> --output=<output> Output directory [default: output].
"""
from docopt import docopt
import json

try:
    from keras2circom.util import *
except:
    import sys
    import os
    sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    from keras2circom.util import *
def inference(input, circuit):
out = input['in']
output = {brackets}
{components}
return out, output
def main():
""" Main entry point of the app """
args = docopt(__doc__)
with open(args['<input.json>']) as f:
input = json.load(f)
with open(args['<circuit.json>']) as f:
circuit = json.load(f)
out, output = inference(input, circuit)
with open(args['--output'] + '/output.json', 'w') as f:
json.dump(output, f)
if __name__ == "__main__":
""" This is executed when run from the command line """
main()
'''
def to_py(circuit: Circuit, dec: int) -> str:
comp_str = ""
for component in circuit.components:
comp_str += transpile_component(component, dec)
return python_template_string.format(
brackets="{}",
components=comp_str,
)
def transpile_component(component: Component, dec: int) -> str:
comp_str = ""
if component.template.op_name == "AveragePooling2D":
comp_str += " out, remainder = AveragePooling2DInt({nRows}, {nCols}, {nChannels}, {poolSize}, {strides}, {input})\n".format(
nRows=component.args["nRows"],
nCols=component.args["nCols"],
nChannels=component.args["nChannels"],
poolSize=component.args["poolSize"],
strides=component.args["strides"],
input="out"
)
comp_str += " output['{name}_out'] = out\n".format(
name=component.name,
)
comp_str += " output['{name}_remainder'] = remainder\n".format(
name=component.name,
)
return comp_str+"\n"
elif component.template.op_name == "BatchNormalization2D":
comp_str += " out, remainder = BatchNormalizationInt({nRows}, {nCols}, {nChannels}, {n}, {input}, {a}, {b})\n".format(
nRows=component.args["nRows"],
nCols=component.args["nCols"],
nChannels=component.args["nChannels"],
n=component.args["n"], |
input="out",
a="circuit['{name}_a']".format(name=component.name),
b="circuit['{name}_b']".format(name=component.name),
)
comp_str += " output['{name}_out'] = out\n".format(
name=component.name,
)
comp_str += " output['{name}_remainder'] = remainder\n".format(
name=component.name,
)
return comp_str+"\n"
elif component.template.op_name == "Conv1D":
comp_str += " out, remainder = Conv1DInt({nInputs}, {nChannels}, {nFilters}, {kernelSize}, {strides}, {n}, {input}, {weights}, {bias})\n".format(
nInputs=component.args["nInputs"],
nChannels=component.args["nChannels"],
nFilters=component.args["nFilters"],
kernelSize=component.args["kernelSize"],
strides=component.args["strides"],
n=component.args["n"],
input="out",
weights="circuit['{name}_weights']".format(name=component.name),
bias="circuit['{name}_bias']".format(name=component.name),
)
comp_str += " output['{name}_out'] = out\n".format(
name=component.name,
)
comp_str += " output['{name}_remainder'] = remainder\n".format(
name=component.name,
)
return comp_str+"\n"
elif component.template.op_name == "Conv2D":
comp_str += " out, remainder = Conv2DInt({nRows}, {nCols}, {nChannels}, {nFilters}, {kernelSize}, {strides}, {n}, {input}, {weights}, {bias})\n".format(
nRows=component.args["nRows"],
nCols=component.args["nCols"],
nChannels=component.args["nChannels"],
nFilters=component.args["nFilters"],
kernelSize=component.args["kernelSize"],
strides=component.args["strides"],
n=component.args["n"],
input="out",
weights="circuit['{name}_weights']".format(name=component.name),
bias="circuit['{name}_bias']".format(name=compo |
nent.name),
)
comp_str += " output['{name}_out'] = out\n".format(
name=component.name,
)
comp_str += " output['{name}_remainder'] = remainder\n".format(
name=component.name,
)
return comp_str+"\n"
elif component.template.op_name == "Dense":
comp_str += " out, remainder = DenseInt({nInputs}, {nOutputs}, {n}, {input}, {weights}, {bias})\n".format(
nInputs=component.args["nInputs"],
nOutputs=component.args["nOutputs"],
n=component.args["n"],
input="out",
weights="circuit['{name}_weights']".format(name=component.name),
bias="circuit['{name}_bias']".format(name=component.name),
)
comp_str += " output['{name}_out'] = out\n".format(
name=component.name,
)
comp_str += " output['{name}_remainder'] = remainder\n".format(
name=component.name,
)
return comp_str+"\n"
elif component.template.op_name == "GlobalAveragePooling2D":
comp_str += " out, remainder = GlobalAveragePooling2DInt({nRows}, {nCols}, {nChannels}, {input})\n".format(
nRows=component.args["nRows"],
nCols=component.args["nCols"],
nChannels=component.args["nChannels"],
input="out"
)
comp_str += " output['{name}_out'] = out\n".format(
name=component.name,
)
comp_str += " output['{name}_remainder'] = remainder\n".format(
name=component.name,
)
return comp_str+"\n"
elif component.template.op_name == "GlobalMaxPooling2D":
comp_str += " out = GlobalMaxPooling2DInt({nRows}, {nCols}, {nChannels}, {input})\n".format(
nRows=component.args["nRows"],
nCols=component.args["nCols"],
nChannels=component.args["nChannels"],
input="out"
)
comp_str += " output['{name}_out'] = out\n".format(
            name=component.name,
)
return comp_str+"\n"
elif component.template.op_name == "MaxPooling2D":
comp_str += " out = MaxPooling2DInt({nRows}, {nCols}, {nChannels}, {poolSize}, {strides}, {input})\n".format(
nRows=component.args["nRows"],
nCols=component.args["nCols"],
nChannels=component.args["nChannels"],
poolSize=component.args["poolSize"],
strides=component.args["strides"],
input="out"
)
comp_str += " output['{name}_out'] = out\n".format(
name=component.name,
)
return comp_str+"\n"
elif component.template.op_name == "Flatten2D":
comp_str += " out = Flatten2DInt({nRows}, {nCols}, {nChannels}, {input})\n".format(
nRows=component.args["nRows"],
nCols=component.args["nCols"],
nChannels=component.args["nChannels"],
input="out"
)
comp_str += " output['{name}_out'] = out\n".format(
name=component.name,
)
return comp_str+"\n"
elif component.template.op_name == "ReLU":
nRows, nCols, nChannels = component.inputs[0].shape
comp_str += " out = ReLUInt({nRows}, {nCols}, {nChannels}, {input})\n".format(
nRows=nRows,
nCols=nCols,
nChannels=nChannels,
input="out"
)
comp_str += " output['{name}_out'] = out\n".format(
name=component.name,
)
return comp_str+"\n"
elif component.template.op_name == "ArgMax":
comp_str += " out = ArgMaxInt(out)\n"
comp_str += " output['{name}_out'] = out\n".format(
name=component.name,
)
return comp_str+"\n"
else:
raise ValueError("Unknown component type: {}".format(component.template.op_name)) |
from .circom import *
from .model import *
from .script import *
import os
def transpile(filename: str, output_dir: str = 'output', raw: bool = False, dec: int = 18) -> Circuit:
''' Transpile a Keras model to a CIRCOM circuit.'''
model = Model(filename, raw)
circuit = Circuit()
for layer in model.layers[:-1]:
circuit.add_components(transpile_layer(layer, dec))
circuit.add_components(transpile_layer(model.layers[-1], dec, True))
if raw:
if circuit.components[-1].template.op_name == 'ArgMax':
circuit.components.pop()
if not os.path.exists(output_dir):
os.makedirs(output_dir)
with open(output_dir + '/circuit.circom', 'w') as f:
f.write(circuit.to_circom())
with open(output_dir + '/circuit.json', 'w') as f:
f.write(circuit.to_json(int(dec)))
with open(output_dir + '/circuit.py', 'w') as f:
f.write(to_py(circuit, int(dec)))
return circuit
def transpile_layer(layer: Layer, dec: int = 18, last: bool = False) -> typing.List[Component]:
''' Transpile a Keras layer to CIRCOM component(s).'''
if layer.op == 'Activation':
if layer.config['activation'] == 'softmax':
if last:
return transpile_ArgMax(layer)
raise ValueError('Softmax must be the last layer')
if layer.config['activation'] == 'relu':
return transpile_ReLU(layer)
if layer.config['activation'] == 'linear':
return []
raise NotImplementedError(f'Activation {layer.config["activation"]} not implemented')
if layer.op == 'Softmax':
if last:
return transpile_ArgMax(layer)
raise ValueError('Softmax must be the last layer')
if layer.op == 'ReLU':
return transpile_ReLU(layer)
if layer.op == 'AveragePooling2D':
return transpile_AveragePooling2D(layer)
if layer.op == 'BatchNormalization':
return transpile_BatchNormalization2D(layer, dec)
if layer.op == 'Conv2D':
        return transpile_Conv2D(layer, dec)
if layer.op == 'Dense':
return transpile_Dense(layer, dec, last)
if layer.op == 'Flatten':
return transpile_Flatten2D(layer)
if layer.op == 'GlobalAveragePooling2D':
return transpile_GlobalAveragePooling2D(layer)
if layer.op == 'GlobalMaxPooling2D':
return transpile_GlobalMaxPooling2D(layer)
if layer.op == 'MaxPooling2D':
return transpile_MaxPooling2D(layer)
raise NotImplementedError(f'Layer {layer.op} is not supported yet.')
def transpile_ArgMax(layer: Layer) -> typing.List[Component]:
return [Component(layer.name, templates['ArgMax'], [Signal('in', layer.output), Signal('out', (1,))], [], {'n': layer.output[0]})]
def transpile_ReLU(layer: Layer) -> typing.List[Component]:
return [Component(layer.name, templates['ReLU'], [Signal('in', layer.output), Signal('out', layer.output)], [])]
def transpile_AveragePooling2D(layer: Layer) -> typing.List[Component]:
if layer.config['data_format'] != 'channels_last':
raise NotImplementedError('Only data_format="channels_last" is supported')
if layer.config['padding'] != 'valid':
raise NotImplementedError('Only padding="valid" is supported')
if layer.config['pool_size'][0] != layer.config['pool_size'][1]:
raise NotImplementedError('Only pool_size[0] == pool_size[1] is supported')
if layer.config['strides'][0] != layer.config['strides'][1]:
raise NotImplementedError('Only strides[0] == strides[1] is supported')
return [Component(layer.name, templates['AveragePooling2D'], [Signal('in', layer.input), Signal('out', layer.output), Signal('remainder', layer.output)],[],{
'nRows': layer.input[0],
'nCols': layer.input[1],
'nChannels': layer.input[2],
'poolSize': layer.config['pool_size'][0],
'strides': layer.config['strides'][0],
})]
def transpile_BatchNormalization2D(layer: Layer, dec: int) -> typing.List[Component]:
if layer.input.__len__() != 3:
        raise NotImplementedError('Only 2D inputs are supported')
if layer.config['axis'][0] != 3:
raise NotImplementedError('Only axis=3 is supported')
if layer.config['center'] != True:
raise NotImplementedError('Only center=True is supported')
if layer.config['scale'] != True:
raise NotImplementedError('Only scale=True is supported')
gamma = layer.weights[0]
beta = layer.weights[1]
moving_mean = layer.weights[2]
moving_var = layer.weights[3]
epsilon = layer.config['epsilon']
a = gamma/(moving_var+epsilon)**.5
b = beta-gamma*moving_mean/(moving_var+epsilon)**.5
return [Component(layer.name, templates['BatchNormalization2D'], [
Signal('in', layer.input),
Signal('a', a.shape, a),
Signal('b', b.shape, b),
Signal('out', layer.output),
Signal('remainder', layer.output),
],[],{
'nRows': layer.input[0],
'nCols': layer.input[1],
'nChannels': layer.input[2],
'n': '10**'+dec,
})]
def transpile_Conv2D(layer: Layer, dec: int) -> typing.List[Component]:
if layer.config['data_format'] != 'channels_last':
raise NotImplementedError('Only data_format="channels_last" is supported')
if layer.config['padding'] != 'valid':
raise NotImplementedError('Only padding="valid" is supported')
if layer.config['strides'][0] != layer.config['strides'][1]:
raise NotImplementedError('Only strides[0] == strides[1] is supported')
if layer.config['kernel_size'][0] != layer.config['kernel_size'][1]:
raise NotImplementedError('Only kernel_size[0] == kernel_size[1] is supported')
if layer.config['dilation_rate'][0] != 1:
raise NotImplementedError('Only dilation_rate[0] == 1 is supported')
if layer.config['dilation_rate'][1] != 1:
raise NotImplementedError('Only dilation_rate[1] == 1 is supported')
if layer.config['groups'] != 1:
raise NotImplementedError('Only groups == 1 is supported')
    if layer.config['activation'] not in ['linear', 'relu']:
raise NotImplementedError(f'Activation {layer.config["activation"]} is not supported')
if layer.config['use_bias'] == False:
layer.weights.append(np.zeros(layer.weights[0].shape[-1]))
conv = Component(layer.name, templates['Conv2D'], [
Signal('in', layer.input),
Signal('weights', layer.weights[0].shape, layer.weights[0]),
Signal('bias', layer.weights[1].shape, layer.weights[1]),
Signal('out', layer.output),
Signal('remainder', layer.output),
],[],{
'nRows': layer.input[0],
'nCols': layer.input[1],
'nChannels': layer.input[2],
'nFilters': layer.config['filters'],
'kernelSize': layer.config['kernel_size'][0],
'strides': layer.config['strides'][0],
'n': '10**'+dec,
})
if layer.config['activation'] == 'relu':
activation = Component(layer.name+'_re_lu', templates['ReLU'], [Signal('in', layer.output), Signal('out', layer.output)], [])
return [conv, activation]
return [conv]
def transpile_Dense(layer: Layer, dec: int, last: bool = False) -> typing.List[Component]:
if not last and layer.config['activation'] == 'softmax':
raise NotImplementedError('Softmax is only supported as last layer')
if layer.config['activation'] not in ['linear', 'relu', 'softmax']:
raise NotImplementedError(f'Activation {layer.config["activation"]} is not supported')
if layer.config['use_bias'] == False:
layer.weights.append(np.zeros(layer.weights[0].shape[-1]))
dense = Component(layer.name, templates['Dense'], [
Signal('in', layer.input),
Signal('weights', layer.weights[0].shape, layer.weights[0]),
Signal('bias', layer.weights[1].shape, layer.weights[1]),
Signal('out', layer.output),
Signal('remainder', layer.output),
],[],{
'nInputs': layer.input[0],
'nOutputs': layer.output[0],
'n': '10**'+dec,
})
    if layer.config['activation'] == 'relu':
activation = Component(layer.name+'_re_lu', templates['ReLU'], [Signal('in', layer.output), Signal('out', layer.output)], [])
return [dense, activation]
if layer.config['activation'] == 'softmax':
activation = Component(layer.name+'_softmax', templates['ArgMax'], [Signal('in', layer.output), Signal('out', (1,))], [], {'n': layer.output[0]})
return [dense, activation]
return [dense]
def transpile_Flatten2D(layer: Layer) -> typing.List[Component]:
if layer.input.__len__() != 3:
raise NotImplementedError('Only 2D inputs are supported')
return [Component(layer.name, templates['Flatten2D'], [
Signal('in', layer.input),
Signal('out', layer.output),
],[],{
'nRows': layer.input[0],
'nCols': layer.input[1],
'nChannels': layer.input[2],
})]
def transpile_GlobalAveragePooling2D(layer: Layer) -> typing.List[Component]:
if layer.config['data_format'] != 'channels_last':
raise NotImplementedError('Only data_format="channels_last" is supported')
if layer.config['keepdims']:
raise NotImplementedError('Only keepdims=False is supported')
return [Component(layer.name, templates['GlobalAveragePooling2D'], [
Signal('in', layer.input),
Signal('out', layer.output),
Signal('remainder', layer.output),
],[],{
'nRows': layer.input[0],
'nCols': layer.input[1],
'nChannels': layer.input[2],
})]
def transpile_GlobalMaxPooling2D(layer: Layer) -> typing.List[Component]:
if layer.config['data_format'] != 'channels_last':
raise NotImplementedError('Only data_format="channels_last" is supported')
if layer.config['keepdims']:
raise NotImplementedError('Only keepdims=False is supported')
return [Component(layer.name, templates['GlobalMaxPooling2D'], [
Signal('in', layer.input),
Signal('out', layer.output),
],[],{
'nRows': layer.input[0],
        'nCols': layer.input[1],
'nChannels': layer.input[2],
})]
def transpile_MaxPooling2D(layer: Layer) -> typing.List[Component]:
if layer.config['data_format'] != 'channels_last':
raise NotImplementedError('Only data_format="channels_last" is supported')
if layer.config['padding'] != 'valid':
raise NotImplementedError('Only padding="valid" is supported')
if layer.config['pool_size'][0] != layer.config['pool_size'][1]:
raise NotImplementedError('Only pool_size[0] == pool_size[1] is supported')
if layer.config['strides'][0] != layer.config['strides'][1]:
raise NotImplementedError('Only strides[0] == strides[1] is supported')
return [Component(layer.name, templates['MaxPooling2D'], [Signal('in', layer.input), Signal('out', layer.output)], [],{
'nRows': layer.input[0],
'nCols': layer.input[1],
'nChannels': layer.input[2],
'poolSize': layer.config['pool_size'][0],
'strides': layer.config['strides'][0],
    })]
def AveragePooling2DInt (nRows, nCols, nChannels, poolSize, strides, input):
    out = [[[0 for _ in range(nChannels)] for _ in range((nCols-poolSize)//strides + 1)] for _ in range((nRows-poolSize)//strides + 1)]
    remainder = [[[None for _ in range(nChannels)] for _ in range((nCols-poolSize)//strides + 1)] for _ in range((nRows-poolSize)//strides + 1)]
    for i in range((nRows-poolSize)//strides + 1):
        for j in range((nCols-poolSize)//strides + 1):
            for k in range(nChannels):
                for x in range(poolSize):
                    for y in range(poolSize):
                        out[i][j][k] += int(input[i*strides+x][j*strides+y][k])
                remainder[i][j][k] = str(out[i][j][k] % poolSize**2)
                out[i][j][k] = str(out[i][j][k] // poolSize**2)
    return out, remainder
def BatchNormalizationInt(nRows, nCols, nChannels, n, X_in, a_in, b_in):
out = [[[None for _ in range(nChannels)] for _ in range(nCols)] for _ in range(nRows)]
remainder = [[[None for _ in range(nChannels)] for _ in range(nCols)] for _ in range(nRows)]
for i in range(nRows):
for j in range(nCols):
for k in range(nChannels):
out[i][j][k] = int(X_in[i][j][k])*int(a_in[k]) + int(b_in[k])
remainder[i][j][k] = str(out[i][j][k] % n)
                out[i][j][k] = str(out[i][j][k] // n)
return out, remainder
def Conv1DInt(nInputs, nChannels, nFilters, kernelSize, strides, n, input, weights, bias):
    out = [[0 for _ in range(nFilters)] for j in range((nInputs - kernelSize)//strides + 1)]
    remainder = [[None for _ in range(nFilters)] for _ in range((nInputs - kernelSize)//strides + 1)]
    for i in range((nInputs - kernelSize)//strides + 1):
        for j in range(nFilters):
            for k in range(kernelSize):
                for l in range(nChannels):
                    out[i][j] += int(input[i*strides + k][l])*int(weights[k][l][j])
            out[i][j] += int(bias[j])
            remainder[i][j] = str(out[i][j] % n)
            out[i][j] = str(out[i][j] // n)
    return out, remainder
def Conv2DInt(nRows, nCols, nChannels, nFilters, kernelSize, strides, n, input, weights, bias):
    out = [[[0 for _ in range(nFilters)] for _ in range((nCols - kernelSize)//strides + 1)] for _ in range((nRows - kernelSize)//strides + 1)]
    remainder = [[[None for _ in range(nFilters)] for _ in range((nCols - kernelSize)//strides + 1)] for _ in range((nRows - kernelSize)//strides + 1)]
    for i in range((nRows - kernelSize)//strides + 1):
        for j in range((nCols - kernelSize)//strides + 1):
            for m in range(nFilters):
                for k in range(nChannels):
                    for x in range(kernelSize):
                        for y in range(kernelSize):
                            out[i][j][m] += int(input[i*strides+x][j*strides+y][k])*int(weights[x][y][k][m])
                out[i][j][m] += int(bias[m])
                remainder[i][j][m] = str(out[i][j][m] % n)
                out[i][j][m] = str(out[i][j][m] // n)
    return out, remainder
def DenseInt(nInputs, nOutputs, n, input, weights, bias):
out = [0 for _ in range(nOutputs)]
remainder = [None for _ in range(nOutputs)]
for j in range(nOutputs):
for i in range(nInputs):
out[j] += int(input[i])*int(weights[i][j])
out[j] += int(bias[j])
remainder[j] = str(out[j] % n)
        out[j] = str(out[j] // n)
return out, remainder
def GlobalAveragePooling2DInt(nRows, nCols, nChannels, input):
out = [0 for _ in range(nChannels)]
remainder = [None for _ in range(nChannels)]
for k in range(nChannels):
for i in range(nRows):
for j in range(nCols):
out[k] += int(input[i][j][k])
remainder[k] = str(out[k] % (nRows * nCols))
        out[k] = str(out[k] // (nRows * nCols))
return out, remainder
def GlobalMaxPooling2DInt(nRows, nCols, nChannels, input):
out = [max(int(input[i][j][k]) for i in range(nRows) for j in range(nCols)) for k in range(nChannels)]
return out
def MaxPooling2DInt(nRows, nCols, nChannels, poolSize, strides, input):
    out = [[[str(max(int(input[i*strides + x][j*strides + y][k]) for x in range(poolSize) for y in range(poolSize))) for k in range(nChannels)] for j in range((nCols - poolSize)//strides + 1)] for i in range((nRows - poolSize)//strides + 1)]
return out
def Flatten2DInt(nRows, nCols, nChannels, input):
    out = [str(int(input[i][j][k])) for i in range(nRows) for j in range(nCols) for k in range(nChannels)]
return out
def ReLUInt(nRows, nCols, nChannels, input):
out = [[[str(max(int(input[i][j][k]), 0)) for k in range(nChannels)] for j in range(nCols)] for i in range(nRows)]
return out
def ArgMaxInt(input):
    return [input.index(str(max(int(input[i]) for i in range(len(input)))))]
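
if __name__ == '__main__':
    # Tiny sanity check of the quantised arithmetic above (all values made up):
    # 3*2 + 4*5 + 7 = 33; with n = 10 this splits into quotient '3', remainder '3'.
    out, rem = DenseInt(2, 1, 10, ['3', '4'], [['2'], ['5']], ['7'])
    print(out, rem)  # ['3'] ['3']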
""" Transpile a Keras model to a CIRCOM circuit.
Usage:
main.py <model.h5> [-o <output>] [--raw] [-d <decimals>]
main.py (-h | --help)
Options:
-h --help Show this screen.
-o <output> --output=<output> Output directory [default: output].
--raw Output raw model outputs instead of the argmax of outputs [default: False].
-d <decimals> --decimals=<decimals> Number of decimals for model precision [default: 18].
"""
from docopt import docopt
from keras2circom import circom, transpiler
def main():
""" Main entry point of the app """
args = docopt(__doc__)
circom.dir_parse('node_modules/circomlib-ml/circuits/', skips=['util.circom', 'circomlib-matrix', 'circomlib', 'crypto'])
transpiler.transpile(args['<model.h5>'], args['--output'], args['--raw'], args['--decimals'])
if __name__ == "__main__":
""" This is executed when run from the command line """
    main()
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"
"from tensorflow.keras.layers |
import (\n",
" Input,\n",
" Activation,\n",
" AveragePooling2D,\n",
" BatchNormalization,\n",
" Conv2D,\n",
" Dense,\n",
" Dropout,\n",
" Flatten,\n",
" GlobalAveragePooling2D,\n",
" GlobalMaxPooling2D,\n",
" MaxPooling2D,\n",
" ReLU,\n",
" Softmax,\n",
" )\n",
"from tensorflow.keras |
import Model\n",
"from tensorflow.keras.datasets |
import mnist\n",
"from tensorflow.keras.utils |
import to_categorical\n",
" |
import numpy as np\n",
" |
import matplotlib.pyplot as plt\n",
" |
import tensorflow as tf"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"
"(X_train, y_train), (X_test, y_test) = mnist.load_data()"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"
"y_train = to_categorical(y_train)\n",
"y_test = to_categorical(y_test)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"
"X_train = X_train.reshape(X_train.shape[0], 28, 28, 1)\n",
"X_test = X_test.reshape(X_test.shape[0], 28, 28, 1)\n",
"\n",
"
"X_train = X_train.astype('float32')\n",
"X_test = X_test.astype('float32')\n",
"X_train /= 255.0\n",
"X_test /= 255.0"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"inputs = Input(shape=(28,28,1))\n",
"out = Conv2D(4, 3, use_bias=False)(inputs)\n",
"out = BatchNormalization()(out)\n",
"out = Activation('relu')(out)\n",
"out = MaxPooling2D()(out)\n",
"out = Conv2D(8, 3, use_bias=True, strides=2)(out)\n",
"out = ReLU()(out)\n",
"out = AveragePooling2D()(out)\n",
"out = Flatten()(out)\n",
"
"out = Dense(10, activation=\"softmax\")(out)\n",
"model = Model(inputs, out)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"model\"\n",
"_________________________________________________________________\n",
" Layer (type) Output Shape Param
"=================================================================\n",
" input_1 (InputLayer) [(None, 28, 28, 1)] 0 \n",
" \n",
" conv2d (Conv2D) (None, 26, 26, 4) 36 \n",
" |
\n",
" batch_normalization (BatchN (None, 26, 26, 4) 16 \n",
" ormalization) \n",
" \n",
" activation (Activation) (None, 26, 26, 4) 0 \n",
" \n",
" max_pooling2d (MaxPooling2D (None, 13, 13, 4) 0 \n",
" ) \n",
" \n",
" conv2d_1 (Conv2D) (None, 6, 6, 8) 296 \n",
" \n",
" re_lu (ReLU) (None, 6, 6, 8) 0 \n",
" \n",
" average_pooling2d (AverageP (None, 3, 3, 8) 0 \n",
" ooling2D) \n",
" \n",
" flatten (Flatten) (None, 72) 0 \n",
" \n",
" dense (Dense) (None, 10) 730 \n",
" \n",
"=================================================================\n",
"Total params: 1,078\n",
"Trainable params: 1,070\n",
"Non-trainable params: 8\n",
"_________________________________________________________________\n"
]
}
],
"source": [
"model.summary()"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"model.compile(\n",
" loss='categorical_crossentropy',\n",
" optimizer='adam',\ |
n",
" metrics=['acc']\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/15\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-11-26 21:47:52.776729: W tensorflow/core/platform/profile_utils/cpu_utils.cc:128] Failed to get CPU frequency: 0 Hz\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"1875/1875 [==============================] - 11s 6ms/step - loss: 0.5203 - acc: 0.8386 - val_loss: 0.2099 - val_acc: 0.9363\n",
"Epoch 2/15\n",
"1875/1875 [==============================] - 11s 6ms/step - loss: 0.1926 - acc: 0.9419 - val_loss: 0.1497 - val_acc: 0.9543\n",
"Epoch 3/15\n",
"1875/1875 [==============================] - 10s 5ms/step - loss: 0.1551 - acc: 0.9522 - val_loss: 0.1263 - val_acc: 0.9591\n",
"Epoch 4/15\n",
"1875/1875 [==============================] - 10s 5ms/step - loss: 0.1361 - acc: 0.9580 - val_loss: 0.1139 - val_acc: 0.9628\n",
"Epoch 5/15\n",
"1875/1875 [==============================] - 10s 5ms/step - loss: 0.1253 - acc: 0.9617 - val_loss: 0.1031 - val_acc: 0.9679\n",
"Epoch 6/15\n",
"1875/1875 [==============================] - 11s 6ms/step - loss: 0.1168 - acc: 0.9636 - val_loss: 0.0976 - val_acc: 0.9697\n",
"Epoch 7/15\n",
"1875/1875 [==============================] - 10s 5ms/step - loss: 0.1113 - acc: 0.9650 - val_loss: 0.0923 - val_acc: 0.9711\n",
"Epoch 8/15\n",
"1875/1875 [==============================] - 10s 5ms/step - loss: 0.1072 - acc: 0.9673 - val_loss: 0.0884 - val_acc: 0.9732\n",
"Epoch 9/15\n",
"1875/1875 [==============================] - 12s 7ms/step - loss: 0.1026 - acc: 0.9683 - val_loss: 0.0879 - val_acc: 0.9725\n",
"Epoch 10/15\n",
"1875/1875 [==============================] - 11s 6ms/step - loss: 0.0999 - acc: 0. |
9691 - val_loss: 0.0928 - val_acc: 0.9719\n",
"Epoch 11/15\n",
"1875/1875 [==============================] - 10s 5ms/step - loss: 0.0968 - acc: 0.9702 - val_loss: 0.0954 - val_acc: 0.9699\n",
"Epoch 12/15\n",
"1875/1875 [==============================] - 10s 5ms/step - loss: 0.0945 - acc: 0.9706 - val_loss: 0.0841 - val_acc: 0.9740\n",
"Epoch 13/15\n",
"1875/1875 [==============================] - 10s 5ms/step - loss: 0.0926 - acc: 0.9718 - val_loss: 0.0826 - val_acc: 0.9748\n",
"Epoch 14/15\n",
"1875/1875 [==============================] - 10s 5ms/step - loss: 0.0893 - acc: 0.9723 - val_loss: 0.0803 - val_acc: 0.9751\n",
"Epoch 15/15\n",
"1875/1875 [==============================] - 10s 5ms/step - loss: 0.0892 - acc: 0.9723 - val_loss: 0.0767 - val_acc: 0.9757\n"
]
},
{
"data": {
"text/plain": [
"<keras.callbacks.History at 0x177842d60>"
]
},
"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.fit(X_train, y_train, epochs=15, batch_size=32, validation_data=(X_test, y_test))"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"model.save('model.h5')"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "keras2circom",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.13"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "71414dc221f26c27f268040756e42b4f7499507456a67f7434828e3314a20678"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
} |
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!cd .. && python main.py models/model.h5"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
" |
import sys\n",
" |
import os\n",
"
"sys.path.append(os.path.dirname((os.getcwd())))\n",
"from output.circuit |
import inference\n",
" |
import json"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"with open('../output/circuit.json') as f:\n",
" circuit = json.load(f)\n",
"\n",
"with open('y_test.json') as f:\n",
" y_test = json.load(f)"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
"
" |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.