let original_predictions = Model::run_onnx_predictions(
&settings.run_args,
&model_path,
&chunks,
model.graph.input_shapes()?,
)?;
let range = if let Some(scales) = scales {
scales
} else {
(11..14).collect::<Vec<crate::Scale>>()
};
let div_rebasing = if only_range_check_rebase {
vec![false]
} else {
vec![true, false]
};
let mut found_params: Vec<GraphSettings> = vec![];
let range_grid = range
.iter()
.cartesian_product(range.iter())
.map(|(a, b)| (*a, *b))
.collect::<Vec<(crate::Scale, crate::Scale)>>();
let mut range_grid = range_grid
.into_iter()
.filter(|(a, b)| a <= b)
.collect::<Vec<(crate::Scale, crate::Scale)>>();
let all_scale_0 = model
.graph
.get_input_types()?
.iter()
.all(|t| t.is_integer());
if all_scale_0 {
range_grid = range_grid
.iter()
.map(|(_, b)| (0, *b))
.sorted()
.dedup()
.collect::<Vec<(crate::Scale, crate::Scale)>>();
}
let range_grid = range_grid
.iter()
.cartesian_product(scale_rebase_multiplier.iter())
.map(|(a, b)| (*a, *b))
.collect::<Vec<((crate::Scale, crate::Scale), u32)>>();
let range_grid = range_grid
.iter()
.cartesian_product(div_rebasing.iter())
.map(|(a, b)| (*a, *b))
.collect::<Vec<(((crate::Scale, crate::Scale), u32), bool)>>();
let mut forward_pass_res = HashMap::new();
let pb = init_bar(range_grid.len() as u64);
pb.set_message("calibrating...");
let mut num_failed = 0;
let mut num_passed = 0;
for (((input_scale, param_scale), scale_rebase_multiplier), div_rebasing) in range_grid {
pb.set_message(format!(
"i-scale: {}, p-scale: {}, rebase-(x): {}, div-rebase: {}, fail: {}, pass: {}",
input_scale.to_string().blue(),
param_scale.to_string().blue(),
scale_rebase_multiplier.to_string().blue(),
div_rebasing.to_string().yellow(),
num_failed.to_string().red(),
num_passed.to_string().green()
));
let key = (
input_scale,
param_scale,
scale_rebase_multiplier,
div_rebasing,
);
forward_pass_res.insert(key, vec![]);
let local_run_args = RunArgs {
input_scale,
param_scale,
scale_rebase_multiplier,
div_rebasing,
..settings.run_args.clone()
};
let _r = Gag::stdout().ok();
let _g = Gag::stderr().ok();
let mut circuit = match GraphCircuit::from_run_args(&local_run_args, &model_path) {
Ok(c) => c,
Err(e) => {
error!("circuit creation from run args failed: {:?}", e);
pb.inc(1);
num_failed += 1;
continue;
}
};
let forward_res = chunks
.iter()
.map(|chunk| {
let chunk = chunk.clone();
let data = circuit
.load_graph_from_file_exclusively(&chunk)
.map_err(|e| format!("failed to load circuit inputs: {}", e))?;
let forward_res = circuit
.forward::<KZGCommitmentScheme<Bn256>>(&mut data.clone(), None, None, true)
.map_err(|e| format!("failed to forward: {}", e))?;
forward_pass_res
.get_mut(&key)
.ok_or("key not found")?
.push(forward_res);
Ok(()) as Result<(), String>
})
.collect::<Result<Vec<()>, String>>();
match forward_res {
Ok(_) => (),
Err(e) => {
error!("forward pass failed: {:?}", e);
pb.inc(1);
num_failed += 1;
continue;
}
}
drop(_r);
drop(_g);
let result = forward_pass_res.get(&key).ok_or("key not found")?;
let min_lookup_range = result
.iter()
.map(|x| x.min_lookup_inputs)
.min()
.unwrap_or(0);
let max_lookup_range = result
.iter()
.map(|x| x.max_lookup_inputs)
.max()
.unwrap_or(0);
let max_range_size = result.iter().map(|x| x.max_range_size).max().unwrap_or(0);
let res = circuit.calc_min_logrows(
(min_lookup_range, max_lookup_range),
max_range_size,
max_logrows,
lookup_safety_margin,
);
if res.is_ok() {
let new_settings = circuit.settings().clone();
let found_run_args = RunArgs {
input_scale: new_settings.run_args.input_scale,
param_scale: new_settings.run_args.param_scale,
div_rebasing: new_settings.run_args.div_rebasing,
lookup_range: new_settings.run_args.lookup_range,
logrows: new_settings.run_args.logrows,
scale_rebase_multiplier: new_settings.run_args.scale_rebase_multiplier,
..settings.run_args.clone()
};
let found_settings = GraphSettings {
run_args: found_run_args,
required_lookups: new_settings.required_lookups,
required_range_checks: new_settings.required_range_checks,
model_output_scales: new_settings.model_output_scales,
model_input_scales: new_settings.model_input_scales,
num_rows: new_settings.num_rows,
total_assignments: new_settings.total_assignments,
total_const_size: new_settings.total_const_size,
..settings.clone()
};
found_params.push(found_settings.clone());
debug!(
"found settings: \n {}",
found_settings.as_json()?.to_colored_json_auto()?
);
num_passed += 1;
} else {
error!("calibration failed {}", res.err().unwrap());
num_failed += 1;
}
pb.inc(1);
}
pb.finish_with_message("Calibration Done.");
if found_params.is_empty() {
return Err("calibration failed, could not find any suitable parameters given the calibration dataset".into());
}
debug!("Found {} sets of parameters", found_params.len());
let mut best_params = match target {
CalibrationTarget::Resources { .. } => {
let mut param_iterator = found_params.iter().sorted_by_key(|p| p.run_args.logrows);
let min_logrows = param_iterator
.next()
.ok_or("no params found")?
.run_args
.logrows;
found_params
.iter()
.filter(|p| p.run_args.logrows == min_logrows)
.max_by_key(|p| {
(
p.run_args.input_scale,
p.run_args.param_scale,
p.run_args.scale_rebase_multiplier,
)
})
.ok_or("no params found")?
.clone()
}
CalibrationTarget::Accuracy => {
let param_iterator = found_params.iter().sorted_by_key(|p| {
(
p.run_args.input_scale,
p.run_args.param_scale,
p.run_args.scale_rebase_multiplier,
)
});
let last = param_iterator.last().ok_or("no params found")?;
let max_scale = (
last.run_args.input_scale,
last.run_args.param_scale,
last.run_args.scale_rebase_multiplier,
);
found_params
.iter()
.filter(|p| {
(
p.run_args.input_scale,
p.run_args.param_scale,
p.run_args.scale_rebase_multiplier,
) == max_scale
})
.min_by_key(|p| p.run_args.logrows)
.ok_or("no params found")?
.clone()
}
};
let outputs = forward_pass_res
.get(&(
best_params.run_args.input_scale,
best_params.run_args.param_scale,
best_params.run_args.scale_rebase_multiplier,
best_params.run_args.div_rebasing,
))
.ok_or("no params found")?
.iter()
.map(|x| x.get_float_outputs(&best_params.model_output_scales))
.collect::<Vec<_>>();
let accuracy_res = AccuracyResults::new(
original_predictions.into_iter().flatten().collect(),
outputs.into_iter().flatten().collect(),
)?;
let tear_sheet_table = Table::new(vec![accuracy_res]);
warn!(
"\n\n <------------- Numerical Fidelity Report (input_scale: {}, param_scale: {}, scale_input_multiplier: {}) ------------->\n\n{}\n\n",
best_params.run_args.input_scale,
best_params.run_args.param_scale,
best_params.run_args.scale_rebase_multiplier,
tear_sheet_table.to_string().as_str()
);
if matches!(target, CalibrationTarget::Resources { col_overflow: true }) {
let lookup_log_rows = best_params.lookup_log_rows_with_blinding();
let module_log_row = best_params.module_constraint_logrows_with_blinding();
let instance_logrows = best_params.log2_total_instances_with_blinding();
let dynamic_lookup_logrows = best_params.dynamic_lookup_and_shuffle_logrows_with_blinding();
let mut reduction = std::cmp::max(lookup_log_rows, module_log_row);
reduction = std::cmp::max(reduction, instance_logrows);
reduction = std::cmp::max(reduction, dynamic_lookup_logrows);
reduction = std::cmp::max(reduction, crate::graph::MIN_LOGROWS);
info!(
"logrows > bits, shrinking logrows: {} -> {}",
best_params.run_args.logrows, reduction
);
best_params.run_args.logrows = reduction;
}
best_params.save(&settings_path)?;
debug!("Saved parameters.");
Ok(best_params)
}
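/// Runs a `MockProver` over the compiled circuit with the supplied witness,
/// checking every constraint without generating a real proof.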
pub(crate) fn mock(
compiled_circuit_path: PathBuf,
data_path: PathBuf,
) -> Result<String, Box<dyn Error>> {
let mut circuit = GraphCircuit::load(compiled_circuit_path)?;
let data = GraphWitness::from_path(data_path)?;
circuit.load_graph_witness(&data)?;
let public_inputs = circuit.prepare_public_inputs(&data)?;
info!("Mock proof");
let prover = halo2_proofs::dev::MockProver::run(
circuit.settings().run_args.logrows,
&circuit,
vec![public_inputs],
)
.map_err(Box::<dyn Error>::from)?;
prover
.verify()
.map_err(|e| Box::<dyn Error>::from(ExecutionError::VerifyError(e)))?;
Ok(String::new())
}
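/// Renders a Solidity verifier contract for a KZG-backed circuit, writing both
/// the contract source and its ABI to disk. Requires `solc` to be available.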
pub(crate) fn create_evm_verifier(
vk_path: PathBuf,
srs_path: Option<PathBuf>,
settings_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
render_vk_separately: bool,
) -> Result<String, Box<dyn Error>> {
check_solc_requirement();
let settings = GraphSettings::load(&settings_path)?;
let commitment: Commitments = settings.run_args.commitment.into();
let params = load_params_verifier::<KZGCommitmentScheme<Bn256>>(
srs_path,
settings.run_args.logrows,
commitment,
)?;
let num_instance = settings.total_instances();
let num_instance: usize = num_instance.iter().sum::<usize>();
let vk = load_vk::<KZGCommitmentScheme<Bn256>, GraphCircuit>(vk_path, settings)?;
trace!("params computed");
let generator = halo2_solidity_verifier::SolidityGenerator::new(
¶ms,
&vk,
halo2_solidity_verifier::BatchOpenScheme::Bdfg21,
num_instance,
);
let verifier_solidity = if render_vk_separately {
generator.render_separately()?.0
} else {
generator.render()?
};
File::create(sol_code_path.clone())?.write_all(verifier_solidity.as_bytes())?;
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2Verifier", 0)?;
serde_json::to_writer(std::fs::File::create(abi_path)?, &abi)?;
Ok(String::new())
}
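/// Renders only the `Halo2VerifyingKey` Solidity artifact, for use alongside a
/// verifier contract that was rendered separately.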
pub(crate) fn create_evm_vk(
vk_path: PathBuf,
srs_path: Option<PathBuf>,
settings_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
) -> Result<String, Box<dyn Error>> {
check_solc_requirement();
let settings = GraphSettings::load(&settings_path)?;
let commitment: Commitments = settings.run_args.commitment.into();
let params = load_params_verifier::<KZGCommitmentScheme<Bn256>>(
srs_path,
settings.run_args.logrows,
commitment,
)?;
let num_instance = settings.total_instances();
let num_instance: usize = num_instance.iter().sum::<usize>();
let vk = load_vk::<KZGCommitmentScheme<Bn256>, GraphCircuit>(vk_path, settings)?;
trace!("params computed");
let generator = halo2_solidity_verifier::SolidityGenerator::new(
¶ms,
&vk,
halo2_solidity_verifier::BatchOpenScheme::Bdfg21,
num_instance,
);
let vk_solidity = generator.render_separately()?.1;
File::create(sol_code_path.clone())?.write_all(vk_solidity.as_bytes())?;
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2VerifyingKey", 0)?;
serde_json::to_writer(std::fs::File::create(abi_path)?, &abi)?;
Ok(String::new())
}
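/// Generates a `DataAttestation` Solidity contract for on-chain input and/or
/// output data; at least one of the two sources must live on-chain.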
pub(crate) fn create_evm_data_attestation(
settings_path: PathBuf,
_sol_code_path: PathBuf,
_abi_path: PathBuf,
_input: PathBuf,
) -> Result<String, Box<dyn Error>> {
use crate::graph::{DataSource, VarVisibility};
check_solc_requirement();
let settings = GraphSettings::load(&settings_path)?;
let visibility = VarVisibility::from_args(&settings.run_args)?;
trace!("params computed");
let data = GraphData::from_path(_input)?;
let output_data = if let Some(DataSource::OnChain(source)) = data.output_data {
if visibility.output.is_private() {
return Err("private output data on chain is not supported on chain".into());
}
let mut on_chain_output_data = vec![];
for call in source.calls {
on_chain_output_data.push(call);
}
Some(on_chain_output_data)
} else {
None
};
let input_data = if let DataSource::OnChain(source) = data.input_data {
if visibility.input.is_private() {
return Err("private input data on chain is not supported on chain".into());
}
let mut on_chain_input_data = vec![];
for call in source.calls {
on_chain_input_data.push(call);
}
Some(on_chain_input_data)
} else {
None
};
if input_data.is_some() || output_data.is_some() {
let output = fix_da_sol(input_data, output_data)?;
let mut f = File::create(_sol_code_path.clone())?;
let _ = f.write(output.as_bytes());
let (abi, _, _) = get_contract_artifacts(_sol_code_path, "DataAttestation", 0)?;
serde_json::to_writer(std::fs::File::create(_abi_path)?, &abi)?;
} else {
return Err(
"Neither input nor output data source is on-chain. At least one must be on-chain.".into(),
);
}
Ok(String::new())
}
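/// Compiles and deploys the data-attestation contract, writing the deployed
/// address to `addr_path`.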
pub(crate) async fn deploy_da_evm(
data: PathBuf,
settings_path: PathBuf,
sol_code_path: PathBuf,
rpc_url: Option<String>,
addr_path: PathBuf,
runs: usize,
private_key: Option<String>,
) -> Result<String, Box<dyn Error>> {
check_solc_requirement();
let contract_address = deploy_da_verifier_via_solidity(
settings_path,
data,
sol_code_path,
rpc_url.as_deref(),
runs,
private_key.as_deref(),
)
.await?;
info!("Contract deployed at: {}", contract_address);
let mut f = File::create(addr_path)?;
write!(f, "{:#?}", contract_address)?;
Ok(String::new())
}
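/// Compiles and deploys the named contract from `sol_code_path`, writing the
/// deployed address to `addr_path`.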
pub(crate) async fn deploy_evm(
sol_code_path: PathBuf,
rpc_url: Option<String>,
addr_path: PathBuf,
runs: usize,
private_key: Option<String>,
contract_name: &str,
) -> Result<String, Box<dyn Error>> {
check_solc_requirement();
let contract_address = deploy_contract_via_solidity(
sol_code_path,
rpc_url.as_deref(),
runs,
private_key.as_deref(),
contract_name,
)
.await?;
info!("Contract deployed at: {:
let mut f = File::create(addr_path)?;
write!(f, "{:#?}", contract_address)?;
Ok(String::new())
}
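/// Verifies a proof against an on-chain verifier, optionally routing through a
/// data-attestation contract and/or a separately deployed verifying key.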
pub(crate) async fn verify_evm(
proof_path: PathBuf,
addr_verifier: H160Flag,
rpc_url: Option<String>,
addr_da: Option<H160Flag>,
addr_vk: Option<H160Flag>,
) -> Result<String, Box<dyn Error>> {
use crate::eth::verify_proof_with_data_attestation;
check_solc_requirement();
let proof = Snark::load::<KZGCommitmentScheme<Bn256>>(&proof_path)?;
let result = if let Some(addr_da) = addr_da {
verify_proof_with_data_attestation(
proof.clone(),
addr_verifier.into(),
addr_da.into(),
addr_vk.map(|s| s.into()),
rpc_url.as_deref(),
)
.await?
} else {
verify_proof_via_solidity(
proof.clone(),
addr_verifier.into(),
addr_vk.map(|s| s.into()),
rpc_url.as_deref(),
)
.await?
};
info!("Solidity verification result: {}", result);
if !result {
return Err("Solidity verification failed".into());
}
Ok(String::new())
}
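/// Renders a Solidity verifier for the aggregation circuit, wiring in the
/// accumulator encoding needed to check aggregated proofs on the EVM.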
pub(crate) fn create_evm_aggregate_verifier(
vk_path: PathBuf,
srs_path: Option<PathBuf>,
sol_code_path: PathBuf,
abi_path: PathBuf,
circuit_settings: Vec<PathBuf>,
logrows: u32,
render_vk_separately: bool,
) -> Result<String, Box<dyn Error>> {
check_solc_requirement();
let srs_path = get_srs_path(logrows, srs_path, Commitments::KZG);
let params: ParamsKZG<Bn256> = load_srs_verifier::<KZGCommitmentScheme<Bn256>>(srs_path)?;
let mut settings: Vec<GraphSettings> = vec![];
for path in circuit_settings.iter() {
let s = GraphSettings::load(path)?;
settings.push(s);
}
let num_instance: usize = settings
.iter()
.map(|s| s.total_instances().iter().sum::<usize>())
.sum();
let num_instance = AggregationCircuit::num_instance(num_instance);
assert_eq!(num_instance.len(), 1);
let num_instance = num_instance[0];
let agg_vk = load_vk::<KZGCommitmentScheme<Bn256>, AggregationCircuit>(vk_path, ())?;
let mut generator = halo2_solidity_verifier::SolidityGenerator::new(
¶ms,
&agg_vk,
halo2_solidity_verifier::BatchOpenScheme::Bdfg21,
num_instance,
);
let acc_encoding = halo2_solidity_verifier::AccumulatorEncoding::new(
0,
AggregationCircuit::num_limbs(),
AggregationCircuit::num_bits(),
);
generator = generator.set_acc_encoding(Some(acc_encoding));
let verifier_solidity = if render_vk_separately {
generator.render_separately()?.0
} else {
generator.render()?
};
File::create(sol_code_path.clone())?.write_all(verifier_solidity.as_bytes())?;
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2Verifier", 0)?;
serde_json::to_writer(std::fs::File::create(abi_path)?, &abi)?;
Ok(String::new())
}
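/// Compiles a model into a serialized circuit using previously generated settings.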
pub(crate) fn compile_circuit(
model_path: PathBuf,
compiled_circuit: PathBuf,
settings_path: PathBuf,
) -> Result<String, Box<dyn Error>> {
let settings = GraphSettings::load(&settings_path)?;
let circuit = GraphCircuit::from_settings(&settings, &model_path, CheckMode::UNSAFE)?;
circuit.save(compiled_circuit)?;
Ok(String::new())
}
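/// Generates and saves the proving and verifying keys for a compiled circuit,
/// optionally loading a witness first so witness-dependent sizing is applied.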
pub(crate) fn setup(
compiled_circuit: PathBuf,
srs_path: Option<PathBuf>,
vk_path: PathBuf,
pk_path: PathBuf,
witness: Option<PathBuf>,
disable_selector_compression: bool,
) -> Result<String, Box<dyn Error>> {
let mut circuit = GraphCircuit::load(compiled_circuit)?;
if let Some(witness) = witness {
let data = GraphWitness::from_path(witness)?;
circuit.load_graph_witness(&data)?;
}
let logrows = circuit.settings().run_args.logrows;
let commitment: Commitments = circuit.settings().run_args.commitment.into();
let pk = match commitment {
Commitments::KZG => {
let params = load_params_prover::<KZGCommitmentScheme<Bn256>>(
srs_path,
logrows,
Commitments::KZG,
)?;
create_keys::<KZGCommitmentScheme<Bn256>, GraphCircuit>(
&circuit,
¶ms,
disable_selector_compression,
)?
}
Commitments::IPA => {
let params = load_params_prover::<IPACommitmentScheme<G1Affine>>(
srs_path,
logrows,
Commitments::IPA,
)?;
create_keys::<IPACommitmentScheme<G1Affine>, GraphCircuit>(
&circuit,
¶ms,
disable_selector_compression,
)?
}
};
save_vk::<G1Affine>(&vk_path, pk.get_vk())?;
save_pk::<G1Affine>(&pk_path, &pk)?;
Ok(String::new())
}
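/// Populates test input/output data on a local EVM instance so that on-chain
/// data sources can be exercised end to end.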
pub(crate) async fn setup_test_evm_witness(
data_path: PathBuf,
compiled_circuit_path: PathBuf,
test_data: PathBuf,
rpc_url: Option<String>,
input_source: TestDataSource,
output_source: TestDataSource,
) -> Result<String, Box<dyn Error>> {
use crate::graph::TestOnChainData;
info!("run this command in background to keep the instance running for testing");
let mut data = GraphData::from_path(data_path)?;
let mut circuit = GraphCircuit::load(compiled_circuit_path)?;
if matches!(input_source, TestDataSource::File) && matches!(output_source, TestDataSource::File)
{
return Err("Both input and output cannot be from files".into());
}
let test_on_chain_data = TestOnChainData {
data: test_data.clone(),
rpc: rpc_url,
data_sources: TestSources {
input: input_source,
output: output_source,
},
};
circuit
.populate_on_chain_test_data(&mut data, test_on_chain_data)
.await?;
Ok(String::new())
}
use crate::pfsys::ProofType;
pub(crate) async fn test_update_account_calls(
addr: H160Flag,
data: PathBuf,
rpc_url: Option<String>,
) -> Result<String, Box<dyn Error>> {
use crate::eth::update_account_calls;
check_solc_requirement();
update_account_calls(addr.into(), data, rpc_url.as_deref()).await?;
Ok(String::new())
}
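/// Produces a proof for a compiled circuit and witness. The commitment scheme
/// (KZG or IPA), proof strategy, and transcript type are all derived from the
/// circuit settings and the requested proof type.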
pub(crate) fn prove(
data_path: PathBuf,
compiled_circuit_path: PathBuf,
pk_path: PathBuf,
proof_path: Option<PathBuf>,
srs_path: Option<PathBuf>,
proof_type: ProofType,
check_mode: CheckMode,
) -> Result<Snark<Fr, G1Affine>, Box<dyn Error>> {
let data = GraphWitness::from_path(data_path)?;
let mut circuit = GraphCircuit::load(compiled_circuit_path)?;
circuit.load_graph_witness(&data)?;
let pretty_public_inputs = circuit.pretty_public_inputs(&data)?;
let public_inputs = circuit.prepare_public_inputs(&data)?;
let circuit_settings = circuit.settings().clone();
let strategy: StrategyType = proof_type.into();
let transcript: TranscriptType = proof_type.into();
let proof_split_commits: Option<ProofSplitCommit> = data.into();
let commitment = circuit_settings.run_args.commitment.into();
let logrows = circuit_settings.run_args.logrows;
let mut snark = match commitment {
Commitments::KZG => {
let pk =
load_pk::<KZGCommitmentScheme<Bn256>, GraphCircuit>(pk_path, circuit.params())?;
let params = load_params_prover::<KZGCommitmentScheme<Bn256>>(
srs_path,
logrows,
Commitments::KZG,
)?;
match strategy {
StrategyType::Single => create_proof_circuit::<
KZGCommitmentScheme<Bn256>,
_,
ProverSHPLONK<_>,
VerifierSHPLONK<_>,
KZGSingleStrategy<_>,
_,
EvmTranscript<_, _, _, _>,
EvmTranscript<_, _, _, _>,
>(
circuit,
vec![public_inputs],
¶ms,
&pk,
check_mode,
commitment,
transcript,
proof_split_commits,
None,
),
StrategyType::Accum => {
let protocol = Some(compile(
¶ms,
pk.get_vk(),
Config::kzg().with_num_instance(vec![public_inputs.len()]),
));
create_proof_circuit::<
KZGCommitmentScheme<Bn256>,
_,
ProverSHPLONK<_>,
VerifierSHPLONK<_>,
KZGAccumulatorStrategy<_>,
_,
PoseidonTranscript<NativeLoader, _>,
PoseidonTranscript<NativeLoader, _>,
>(
circuit,
vec![public_inputs],
¶ms,
&pk,
check_mode,
commitment,
transcript,
proof_split_commits,
protocol,
)
}
}
}
Commitments::IPA => {
let pk =
load_pk::<IPACommitmentScheme<G1Affine>, GraphCircuit>(pk_path, circuit.params())?;
let params = load_params_prover::<IPACommitmentScheme<G1Affine>>(
srs_path,
circuit_settings.run_args.logrows,
Commitments::IPA,
)?;
match strategy {
StrategyType::Single => create_proof_circuit::<
IPACommitmentScheme<G1Affine>,
_,
ProverIPA<_>,
VerifierIPA<_>,
IPASingleStrategy<_>,
_,
EvmTranscript<_, _, _, _>,
EvmTranscript<_, _, _, _>,
>(
circuit,
vec![public_inputs],
¶ms,
&pk,
check_mode,
commitment,
transcript,
proof_split_commits,
None,
),
StrategyType::Accum => {
let protocol = Some(compile(
¶ms,
pk.get_vk(),
Config::ipa().with_num_instance(vec![public_inputs.len()]),
));
create_proof_circuit::<
IPACommitmentScheme<G1Affine>,
_,
ProverIPA<_>,
VerifierIPA<_>,
IPAAccumulatorStrategy<_>,
_,
PoseidonTranscript<NativeLoader, _>,
PoseidonTranscript<NativeLoader, _>,
>(
circuit,
vec![public_inputs],
¶ms,
&pk,
check_mode,
commitment,
transcript,
proof_split_commits,
protocol,
)
}
}
}
}?;
snark.pretty_public_inputs = pretty_public_inputs;
if let Some(proof_path) = proof_path {
snark.save(&proof_path)?;
}
Ok(snark)
}
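/// Replaces the polynomial commitments embedded in a saved proof with those
/// recorded in a witness file, then saves the proof back in place.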
pub(crate) fn swap_proof_commitments_cmd(
proof_path: PathBuf,
witness: PathBuf,
) -> Result<Snark<Fr, G1Affine>, Box<dyn Error>> {
let snark = Snark::load::<KZGCommitmentScheme<Bn256>>(&proof_path)?;
let witness = GraphWitness::from_path(witness)?;
let commitments = witness.get_polycommitments();
if commitments.is_empty() {
log::warn!("no commitments found in witness");
}
let snark_new = swap_proof_commitments_polycommit(&snark, &commitments)?;
if snark_new.proof != *snark.proof {
log::warn!("swap proof has created a different proof");
}
snark_new.save(&proof_path)?;
Ok(snark_new)
}
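/// Mock-proves the aggregation circuit over previously generated KZG snarks,
/// checking constraints without producing a real proof.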
pub(crate) fn mock_aggregate(
aggregation_snarks: Vec<PathBuf>,
logrows: u32,
split_proofs: bool,
) -> Result<String, Box<dyn Error>> {
let mut snarks = vec![];
for proof_path in aggregation_snarks.iter() {
match Snark::load::<KZGCommitmentScheme<Bn256>>(proof_path) {
Ok(snark) => {
snarks.push(snark);
}
Err(_) => {
return Err(
"invalid sample commitment type for aggregation, must be KZG"
.to_string()
.into(),
);
}
}
}
let pb = {
let pb = init_spinner();
pb.set_message("Aggregating (may take a while)...");
pb
};
let circuit = AggregationCircuit::new(&G1Affine::generator().into(), snarks, split_proofs)?;
let prover = halo2_proofs::dev::MockProver::run(logrows, &circuit, vec![circuit.instances()])
.map_err(Box::<dyn Error>::from)?;
prover
.verify()
.map_err(|e| Box::<dyn Error>::from(ExecutionError::VerifyError(e)))?;
pb.finish_with_message("Done.");
Ok(String::new())
}
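/// Generates proving and verifying keys for the aggregation circuit from the
/// provided sample snarks.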
pub(crate) fn setup_aggregate(
sample_snarks: Vec<PathBuf>,
vk_path: PathBuf,
pk_path: PathBuf,
srs_path: Option<PathBuf>,
logrows: u32,
split_proofs: bool,
disable_selector_compression: bool,
commitment: Commitments,
) -> Result<String, Box<dyn Error>> {
let mut snarks = vec![];
for proof_path in sample_snarks.iter() {
match Snark::load::<KZGCommitmentScheme<Bn256>>(proof_path) {
Ok(snark) => {
snarks.push(snark);
}
Err(_) => {
return Err(
"invalid sample commitment type for aggregation, must be KZG"
.to_string()
.into(),
);
}
}
}
let circuit = AggregationCircuit::new(&G1Affine::generator().into(), snarks, split_proofs)?;
let pk = match commitment {
Commitments::KZG => {
let params = load_params_prover::<KZGCommitmentScheme<Bn256>>(
srs_path,
logrows,
Commitments::KZG,
)?;
create_keys::<KZGCommitmentScheme<Bn256>, AggregationCircuit>(
&circuit,
¶ms,
disable_selector_compression,
)?
}
Commitments::IPA => {
let params = load_params_prover::<IPACommitmentScheme<G1Affine>>(
srs_path,
logrows,
Commitments::IPA,
)?;
create_keys::<IPACommitmentScheme<G1Affine>, AggregationCircuit>(
&circuit,
¶ms,
disable_selector_compression,
)?
}
};
save_vk::<G1Affine>(&vk_path, pk.get_vk())?;
save_pk::<G1Affine>(&pk_path, &pk)?;
Ok(String::new())
}
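/// Aggregates a set of KZG snarks into a single proof, using an EVM or Poseidon
/// transcript depending on where the aggregate proof will be verified.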
pub(crate) fn aggregate(
proof_path: PathBuf,
aggregation_snarks: Vec<PathBuf>,
pk_path: PathBuf,
srs_path: Option<PathBuf>,
transcript: TranscriptType,
logrows: u32,
check_mode: CheckMode,
split_proofs: bool,
commitment: Commitments,
) -> Result<Snark<Fr, G1Affine>, Box<dyn Error>> {
let mut snarks = vec![];
for proof_path in aggregation_snarks.iter() {
match Snark::load::<KZGCommitmentScheme<Bn256>>(proof_path) {
Ok(snark) => {
snarks.push(snark);
}
Err(_) => {
return Err(
"invalid sample commitment type for aggregation, must be KZG"
.to_string()
.into(),
);
}
}
}
let pb = {
let pb = init_spinner();
pb.set_message("Aggregating (may take a while)...");
pb
};
let now = Instant::now();
let snark = match commitment {
Commitments::KZG => {
let pk = load_pk::<KZGCommitmentScheme<Bn256>, AggregationCircuit>(pk_path, ())?;
let params: ParamsKZG<Bn256> = load_params_prover::<KZGCommitmentScheme<_>>(
srs_path.clone(),
logrows,
Commitments::KZG,
)?;
let circuit = AggregationCircuit::new(
&ParamsProver::<G1Affine>::get_g(¶ms)[0].into(),
snarks,
split_proofs,
)?;
let public_inputs = circuit.instances();
match transcript {
TranscriptType::EVM => create_proof_circuit::<
KZGCommitmentScheme<Bn256>,
_,
ProverSHPLONK<_>,
VerifierSHPLONK<_>,
KZGSingleStrategy<_>,
_,
EvmTranscript<_, _, _, _>,
EvmTranscript<_, _, _, _>,
>(
circuit,
vec![public_inputs],
¶ms,
&pk,
check_mode,
commitment,
transcript,
None,
None,
),
TranscriptType::Poseidon => {
let protocol = Some(compile(
¶ms,
pk.get_vk(),
Config::kzg().with_num_instance(vec![public_inputs.len()]),
));
create_proof_circuit::<
KZGCommitmentScheme<Bn256>,
_,
ProverSHPLONK<_>,
VerifierSHPLONK<_>,
KZGAccumulatorStrategy<_>,
_,
PoseidonTranscript<NativeLoader, _>,
PoseidonTranscript<NativeLoader, _>,
>(
circuit,
vec![public_inputs],
¶ms,
&pk,
check_mode,
commitment,
transcript,
None,
protocol,
)
}
}
}
Commitments::IPA => {
let pk = load_pk::<IPACommitmentScheme<_>, AggregationCircuit>(pk_path, ())?;
let params: ParamsIPA<_> = load_params_prover::<IPACommitmentScheme<_>>(
srs_path.clone(),
logrows,
Commitments::IPA,
)?;
let circuit = AggregationCircuit::new(
&ParamsProver::<G1Affine>::get_g(¶ms)[0].into(),
snarks,
split_proofs,
)?;
let public_inputs = circuit.instances();
match transcript {
TranscriptType::EVM => create_proof_circuit::<
IPACommitmentScheme<G1Affine>,
_,
ProverIPA<_>,
VerifierIPA<_>,
IPASingleStrategy<_>,
_,
EvmTranscript<_, _, _, _>,
EvmTranscript<_, _, _, _>,
>(
circuit,
vec![public_inputs],
¶ms,
&pk,
check_mode,
commitment,
transcript,
None,
None,
),
TranscriptType::Poseidon => {
let protocol = Some(compile(
¶ms,
pk.get_vk(),
Config::ipa().with_num_instance(vec![public_inputs.len()]),
));
create_proof_circuit::<
IPACommitmentScheme<G1Affine>,
_,
ProverIPA<_>,
VerifierIPA<_>,
IPAAccumulatorStrategy<_>,
_,
PoseidonTranscript<NativeLoader, _>,
PoseidonTranscript<NativeLoader, _>,
>(
circuit,
vec![public_inputs],
¶ms,
&pk,
check_mode,
commitment,
transcript,
None,
protocol,
)
}
}
}
}?;
let elapsed = now.elapsed();
info!(
"Aggregation proof took {}.{}",
elapsed.as_secs(),
elapsed.subsec_millis()
);
snark.save(&proof_path)?;
pb.finish_with_message("Done.");
Ok(snark)
}
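/// Verifies a single (non-aggregated) proof against its graph settings, loading
/// an appropriately sized SRS for the configured commitment scheme.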
pub(crate) fn verify(
proof_path: PathBuf,
settings_path: PathBuf,
vk_path: PathBuf,
srs_path: Option<PathBuf>,
reduced_srs: bool,
) -> Result<bool, Box<dyn Error>> {
let circuit_settings = GraphSettings::load(&settings_path)?;
let logrows = circuit_settings.run_args.logrows;
let commitment = circuit_settings.run_args.commitment.into();
match commitment {
Commitments::KZG => {
let proof = Snark::load::<KZGCommitmentScheme<Bn256>>(&proof_path)?;
let params: ParamsKZG<Bn256> = if reduced_srs {
load_params_verifier::<KZGCommitmentScheme<Bn256>>(srs_path, 1, Commitments::KZG)?
} else {
load_params_verifier::<KZGCommitmentScheme<Bn256>>(
srs_path,
logrows,
Commitments::KZG,
)?
};
match proof.transcript_type {
TranscriptType::EVM => {
verify_commitment::<
KZGCommitmentScheme<Bn256>,
VerifierSHPLONK<'_, Bn256>,
_,
KZGSingleStrategy<_>,
EvmTranscript<G1Affine, _, _, _>,
GraphCircuit,
_,
>(proof_path, circuit_settings, vk_path, ¶ms, logrows)
}
TranscriptType::Poseidon => {
verify_commitment::<
KZGCommitmentScheme<Bn256>,
VerifierSHPLONK<'_, Bn256>,
_,
KZGSingleStrategy<_>,
PoseidonTranscript<NativeLoader, _>,
GraphCircuit,
_,
>(proof_path, circuit_settings, vk_path, ¶ms, logrows)
}
}
}
Commitments::IPA => {
let proof = Snark::load::<IPACommitmentScheme<G1Affine>>(&proof_path)?;
let params: ParamsIPA<_> = load_params_verifier::<IPACommitmentScheme<G1Affine>>(
srs_path,
logrows,
Commitments::IPA,
)?;
match proof.transcript_type {
TranscriptType::EVM => {
verify_commitment::<
IPACommitmentScheme<G1Affine>,
VerifierIPA<_>,
_,
IPASingleStrategy<_>,
EvmTranscript<G1Affine, _, _, _>,
GraphCircuit,
_,
>(proof_path, circuit_settings, vk_path, ¶ms, logrows)
}
TranscriptType::Poseidon => {
verify_commitment::<
IPACommitmentScheme<G1Affine>,
VerifierIPA<_>,
_,
IPASingleStrategy<_>,
PoseidonTranscript<NativeLoader, _>,
GraphCircuit,
_,
>(proof_path, circuit_settings, vk_path, ¶ms, logrows)
}
}
}
}
}
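/// Generic verification helper, parameterized over the commitment scheme,
/// verifier, strategy, transcript, and circuit type.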
fn verify_commitment<
'a,
Scheme: CommitmentScheme,
V: Verifier<'a, Scheme>,
E: EncodedChallenge<Scheme::Curve>,
Strategy: VerificationStrategy<'a, Scheme, V>,
TR: TranscriptReadBuffer<Cursor<Vec<u8>>, Scheme::Curve, E>,
C: Circuit<<Scheme as CommitmentScheme>::Scalar, Params = Params>,
Params,
>(
proof_path: PathBuf,
settings: Params,
vk_path: PathBuf,
params: &'a Scheme::ParamsVerifier,
logrows: u32,
) -> Result<bool, Box<dyn Error>>
where
Scheme::Scalar: FromUniformBytes<64>
+ SerdeObject
+ Serialize
+ DeserializeOwned
+ WithSmallOrderMulGroup<3>,
Scheme::Curve: SerdeObject + Serialize + DeserializeOwned,
Scheme::ParamsVerifier: 'a,
{
let proof = Snark::load::<Scheme>(&proof_path)?;
let strategy = Strategy::new(params);
let vk = load_vk::<Scheme, C>(vk_path, settings)?;
let now = Instant::now();
let result =
verify_proof_circuit::<V, _, _, _, TR>(&proof, params, &vk, strategy, 1 << logrows);
let elapsed = now.elapsed();
info!(
"verify took {}.{}",
elapsed.as_secs(),
elapsed.subsec_millis()
);
info!("verified: {}", result.is_ok());
result.map_err(|e: plonk::Error| e.into()).map(|_| true)
}
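/// Verifies an aggregated proof; unlike `verify`, the circuit params are the
/// unit type since the aggregation circuit carries no graph settings.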
pub(crate) fn verify_aggr(
proof_path: PathBuf,
vk_path: PathBuf,
srs_path: Option<PathBuf>,
logrows: u32,
reduced_srs: bool,
commitment: Commitments,
) -> Result<bool, Box<dyn Error>> {
match commitment {
Commitments::KZG => {
let proof = Snark::load::<KZGCommitmentScheme<Bn256>>(&proof_path)?;
let params: ParamsKZG<Bn256> = if reduced_srs {
load_params_verifier::<KZGCommitmentScheme<Bn256>>(srs_path, 1, Commitments::KZG)?
} else {
load_params_verifier::<KZGCommitmentScheme<Bn256>>(
srs_path,
logrows,
Commitments::KZG,
)?
};
match proof.transcript_type {
TranscriptType::EVM => verify_commitment::<
KZGCommitmentScheme<Bn256>,
VerifierSHPLONK<'_, Bn256>,
_,
KZGSingleStrategy<_>,
EvmTranscript<_, _, _, _>,
AggregationCircuit,
_,
>(proof_path, (), vk_path, ¶ms, logrows),
TranscriptType::Poseidon => {
verify_commitment::<
KZGCommitmentScheme<Bn256>,
VerifierSHPLONK<'_, Bn256>,
_,
KZGAccumulatorStrategy<_>,
PoseidonTranscript<NativeLoader, _>,
AggregationCircuit,
_,
>(proof_path, (), vk_path, ¶ms, logrows)
}
}
}
Commitments::IPA => {
let proof = Snark::load::<IPACommitmentScheme<G1Affine>>(&proof_path)?;
let params: ParamsIPA<_> = load_params_verifier::<IPACommitmentScheme<G1Affine>>(
srs_path,
logrows,
Commitments::IPA,
)?;
match proof.transcript_type {
TranscriptType::EVM => verify_commitment::<
IPACommitmentScheme<G1Affine>,
VerifierIPA<_>,
_,
IPASingleStrategy<_>,
EvmTranscript<_, _, _, _>,
AggregationCircuit,
_,
>(proof_path, (), vk_path, ¶ms, logrows),
TranscriptType::Poseidon => {
verify_commitment::<
IPACommitmentScheme<G1Affine>,
VerifierIPA<_>,
_,
IPAAccumulatorStrategy<_>,
PoseidonTranscript<NativeLoader, _>,
AggregationCircuit,
_,
>(proof_path, (), vk_path, ¶ms, logrows)
}
}
}
}
}
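/// Loads verifier params for the given commitment scheme, downsizing them to
/// `logrows` if the stored SRS is larger than required.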
pub(crate) fn load_params_verifier<Scheme: CommitmentScheme>(
srs_path: Option<PathBuf>,
logrows: u32,
commitment: Commitments,
) -> Result<Scheme::ParamsVerifier, Box<dyn Error>> {
let srs_path = get_srs_path(logrows, srs_path, commitment);
let mut params = load_srs_verifier::<Scheme>(srs_path)?;
info!("downsizing params to {} logrows", logrows);
if logrows < params.k() {
params.downsize(logrows);
}
Ok(params)
}
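/// Loads prover params for the given commitment scheme, downsizing them to
/// `logrows` if the stored SRS is larger than required.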
pub(crate) fn load_params_prover<Scheme: CommitmentScheme>(
srs_path: Option<PathBuf>,
logrows: u32,
commitment: Commitments,
) -> Result<Scheme::ParamsProver, Box<dyn Error>> {
let srs_path = get_srs_path(logrows, srs_path, commitment);
let mut params = load_srs_prover::<Scheme>(srs_path)?;
info!("downsizing params to {} logrows", logrows);
if logrows < params.k() {
params.downsize(logrows);
}
Ok(params)
}
use halo2_proofs::arithmetic::Field;
use halo2curves::ff::PrimeField;
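/// Converts an `i32` to a field element, mapping a negative value to the
/// additive inverse of its magnitude, e.g. `i32_to_felt::<F>(-15) == -F::from(15)`.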
pub fn i32_to_felt<F: PrimeField>(x: i32) -> F {
if x >= 0 {
F::from(x as u64)
} else {
-F::from(x.unsigned_abs() as u64)
}
}
pub fn i128_to_felt<F: PrimeField>(x: i128) -> F {
if x >= 0 {
F::from_u128(x as u128)
} else {
-F::from_u128(x.unsigned_abs())
}
}
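/// Recovers an `i32` from a field element, treating anything above `i32::MAX`
/// as a wrapped-around negative (the inverse of `i32_to_felt`).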
pub fn felt_to_i32<F: PrimeField + PartialOrd + Field>(x: F) -> i32 {
if x > F::from(i32::MAX as u64) {
let rep = (-x).to_repr();
let negtmp: &[u8] = rep.as_ref();
let lower_32 = u32::from_le_bytes(negtmp[..4].try_into().unwrap());
-(lower_32 as i32)
} else {
let rep = (x).to_repr();
let tmp: &[u8] = rep.as_ref();
let lower_32 = u32::from_le_bytes(tmp[..4].try_into().unwrap());
lower_32 as i32
}
}
pub fn felt_to_f64<F: PrimeField + PartialOrd + Field>(x: F) -> f64 {
if x > F::from_u128(i128::MAX as u128) {
let rep = (-x).to_repr();
let negtmp: &[u8] = rep.as_ref();
let lower_128: u128 = u128::from_le_bytes(negtmp[..16].try_into().unwrap());
-(lower_128 as f64)
} else {
let rep = (x).to_repr();
let tmp: &[u8] = rep.as_ref();
let lower_128: u128 = u128::from_le_bytes(tmp[..16].try_into().unwrap());
lower_128 as f64
}
}
pub fn felt_to_i128<F: PrimeField + PartialOrd + Field>(x: F) -> i128 {
if x > F::from_u128(i128::MAX as u128) {
let rep = (-x).to_repr();
let negtmp: &[u8] = rep.as_ref();
let lower_128: u128 = u128::from_le_bytes(negtmp[..16].try_into().unwrap());
-(lower_128 as i128)
} else {
let rep = (x).to_repr();
let tmp: &[u8] = rep.as_ref();
let lower_128: u128 = u128::from_le_bytes(tmp[..16].try_into().unwrap());
lower_128 as i128
}
}
#[cfg(test)]
mod test {
use super::*;
use halo2curves::pasta::Fp as F;
#[test]
fn test_conv() {
let res: F = i32_to_felt(-15i32);
assert_eq!(res, -F::from(15));
let res: F = i32_to_felt(2_i32.pow(17));
assert_eq!(res, F::from(131072));
let res: F = i128_to_felt(-15i128);
assert_eq!(res, -F::from(15));
let res: F = i128_to_felt(2_i128.pow(17));
assert_eq!(res, F::from(131072));
}
#[test]
fn felttoi32() {
for x in -(2i32.pow(16))..(2i32.pow(16)) {
let fieldx: F = i32_to_felt::<F>(x);
let xf: i32 = felt_to_i32::<F>(fieldx);
assert_eq!(x, xf);
}
}
#[test]
fn felttoi128() {
for x in -(2i128.pow(20))..(2i128.pow(20)) {
let fieldx: F = i128_to_felt::<F>(x);
let xf: i128 = felt_to_i128::<F>(fieldx);
assert_eq!(x, xf);
}
}
}
use super::quantize_float;
use super::GraphError;
use crate::circuit::InputType;
use crate::fieldutils::i128_to_felt;
use crate::tensor::Tensor;
use crate::EZKL_BUF_CAPACITY;
use halo2curves::bn256::Fr as Fp;
use postgres::{Client, NoTls};
use pyo3::prelude::*;
use pyo3::types::PyDict;
use pyo3::ToPyObject;
use serde::ser::SerializeStruct;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::io::BufReader;
use std::io::BufWriter;
use std::io::Read;
use std::panic::UnwindSafe;
use tract_onnx::tract_core::{
tract_data::{prelude::Tensor as TractTensor, TVec},
value::TValue,
};
use tract_onnx::tract_hir::tract_num_traits::ToPrimitive;
type Decimals = u8;
type Call = String;
type RPCUrl = String;
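/// A single input/output element loaded from a data file: a float to be
/// quantized, a boolean, or a raw field element.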
#[derive(Clone, Debug, PartialEq)]
pub enum FileSourceInner {
Float(f64),
Bool(bool),
Field(Fp),
}
impl FileSourceInner {
pub fn is_float(&self) -> bool {
matches!(self, FileSourceInner::Float(_))
}
pub fn is_bool(&self) -> bool {
matches!(self, FileSourceInner::Bool(_))
}
pub fn is_field(&self) -> bool {
matches!(self, FileSourceInner::Field(_))
}
}
impl Serialize for FileSourceInner {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
FileSourceInner::Field(data) => data.serialize(serializer),
FileSourceInner::Bool(data) => data.serialize(serializer),
FileSourceInner::Float(data) => data.serialize(serializer),
}
}
}
impl<'de> Deserialize<'de> for FileSourceInner {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let this_json: Box<serde_json::value::RawValue> = Deserialize::deserialize(deserializer)?;
let bool_try: Result<bool, _> = serde_json::from_str(this_json.get());
if let Ok(t) = bool_try {
return Ok(FileSourceInner::Bool(t));
}
let float_try: Result<f64, _> = serde_json::from_str(this_json.get());
if let Ok(t) = float_try {
return Ok(FileSourceInner::Float(t));
}
let field_try: Result<Fp, _> = serde_json::from_str(this_json.get());
if let Ok(t) = field_try {
return Ok(FileSourceInner::Field(t));
}
Err(serde::de::Error::custom(
"failed to deserialize FileSourceInner",
))
}
}
pub type FileSource = Vec<Vec<FileSourceInner>>;
impl FileSourceInner {
pub fn new_float(f: f64) -> Self {
FileSourceInner::Float(f)
}
pub fn new_field(f: Fp) -> Self {
FileSourceInner::Field(f)
}
pub fn new_bool(f: bool) -> Self {
FileSourceInner::Bool(f)
}
pub fn as_type(&mut self, input_type: &InputType) {
match self {
FileSourceInner::Float(f) => input_type.roundtrip(f),
FileSourceInner::Bool(_) => assert!(matches!(input_type, InputType::Bool)),
FileSourceInner::Field(_) => {}
}
}
pub fn to_field(&self, scale: crate::Scale) -> Fp {
match self {
FileSourceInner::Float(f) => i128_to_felt(quantize_float(f, 0.0, scale).unwrap()),
FileSourceInner::Bool(f) => {
if *f {
Fp::one()
} else {
Fp::zero()
}
}
FileSourceInner::Field(f) => *f,
}
}
pub fn to_float(&self) -> f64 {
match self {
FileSourceInner::Float(f) => *f,
FileSourceInner::Bool(f) => {
if *f {
1.0
} else {
0.0
}
}
FileSourceInner::Field(f) => crate::fieldutils::felt_to_i128(*f) as f64,
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct OnChainSource {
pub calls: Vec<CallsToAccount>,
pub rpc: RPCUrl,
}
impl OnChainSource {
pub fn new(calls: Vec<CallsToAccount>, rpc: RPCUrl) -> Self {
OnChainSource { calls, rpc }
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct PostgresSource {
pub host: RPCUrl,
pub user: String,
pub password: String,
pub query: String,
pub dbname: String,
pub port: String,
}
impl PostgresSource {
pub fn new(
host: RPCUrl,
port: String,
user: String,
query: String,
dbname: String,
password: String,
) -> Self {
PostgresSource {
host,
user,
password,
query,
dbname,
port,
}
}
pub fn fetch(&self) -> Result<Vec<Vec<pg_bigdecimal::PgNumeric>>, Box<dyn std::error::Error>> {
let user = self.user.clone();
let host = self.host.clone();
let query = self.query.clone();
let dbname = self.dbname.clone();
let port = self.port.clone();
let password = self.password.clone();
let config = if password.is_empty() {
format!(
"host={} user={} dbname={} port={}",
host, user, dbname, port
)
} else {
format!(
"host={} user={} dbname={} port={} password={}",
host, user, dbname, port, password
)
};
let mut client = Client::connect(&config, NoTls)?;
let mut res: Vec<pg_bigdecimal::PgNumeric> = Vec::new();
for row in client.query(&query, &[])? {
for i in 0..row.len() {
res.push(row.get(i));
}
}
Ok(vec![res])
}
pub fn fetch_and_format_as_file(
&self,
) -> Result<Vec<Vec<FileSourceInner>>, Box<dyn std::error::Error>> {
Ok(self
.fetch()?
.iter()
.map(|d| {
d.iter()
.map(|d| {
FileSourceInner::Float(
d.n.as_ref()
.unwrap()
.to_f64()
.ok_or("could not convert decimal to f64")
.unwrap(),
)
})
.collect()
})
.collect())
}
}
impl OnChainSource {
pub async fn test_from_file_data(
data: &FileSource,
scales: Vec<crate::Scale>,
mut shapes: Vec<Vec<usize>>,
rpc: Option<&str>,
) -> Result<(Vec<Tensor<Fp>>, Self), Box<dyn std::error::Error>> {
use crate::eth::{evm_quantize, read_on_chain_inputs, test_on_chain_data};
use log::debug;
let (anvil, client) = crate::eth::setup_eth_backend(rpc, None).await?;
let address = client.address();
let mut scales = scales;
for (idx, i) in data.iter().enumerate() {
if i.iter().all(|e| e.is_field()) {
scales[idx] = 0;
shapes[idx] = vec![i.len()];
}
}
let calls_to_accounts = test_on_chain_data(client.clone(), data).await?;
debug!("Calls to accounts: {:?}", calls_to_accounts);
let inputs = read_on_chain_inputs(client.clone(), address, &calls_to_accounts).await?;
debug!("Inputs: {:?}", inputs);
let mut quantized_evm_inputs = vec![];
let mut prev = 0;
for (idx, i) in data.iter().enumerate() {
quantized_evm_inputs.extend(
evm_quantize(
client.clone(),
vec![scales[idx]; i.len()],
&(
inputs.0[prev..prev + i.len()].to_vec(),
inputs.1[prev..prev + i.len()].to_vec(),
),
)
.await?,
);
prev += i.len();
}
let mut inputs: Vec<Tensor<Fp>> = vec![];
for (input, shape) in [quantized_evm_inputs].iter().zip(shapes) {
let mut t: Tensor<Fp> = input.iter().cloned().collect();
t.reshape(&shape)?;
inputs.push(t);
}
let used_rpc = rpc.unwrap_or(&anvil.endpoint()).to_string();
Ok((
inputs,
OnChainSource::new(calls_to_accounts.clone(), used_rpc),
))
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct CallsToAccount {
pub call_data: Vec<(Call, Decimals)>,
pub address: String,
}
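/// Where a graph's input/output data comes from: a local file, on-chain view
/// calls, or a Postgres query.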
#[derive(Clone, Debug, Serialize, PartialEq)]
#[serde(untagged)]
pub enum DataSource {
File(FileSource),
OnChain(OnChainSource),
DB(PostgresSource),
}
impl Default for DataSource {
fn default() -> Self {
DataSource::File(vec![vec![]])
}
}
impl From<FileSource> for DataSource {
fn from(data: FileSource) -> Self {
DataSource::File(data)
}
}
impl From<Vec<Vec<Fp>>> for DataSource {
fn from(data: Vec<Vec<Fp>>) -> Self {
DataSource::File(
data.iter()
.map(|e| e.iter().map(|e| FileSourceInner::Field(*e)).collect())
.collect(),
)
}
}
impl From<Vec<Vec<f64>>> for DataSource {
fn from(data: Vec<Vec<f64>>) -> Self {
DataSource::File(
data.iter()
.map(|e| e.iter().map(|e| FileSourceInner::Float(*e)).collect())
.collect(),
)
}
}
impl From<OnChainSource> for DataSource {
fn from(data: OnChainSource) -> Self {
DataSource::OnChain(data)
}
}
impl<'de> Deserialize<'de> for DataSource {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let this_json: Box<serde_json::value::RawValue> = Deserialize::deserialize(deserializer)?;
let first_try: Result<FileSource, _> = serde_json::from_str(this_json.get());
if let Ok(t) = first_try {
return Ok(DataSource::File(t));
}
let second_try: Result<OnChainSource, _> = serde_json::from_str(this_json.get());
if let Ok(t) = second_try {
return Ok(DataSource::OnChain(t));
}
{
let third_try: Result<PostgresSource, _> = serde_json::from_str(this_json.get());
if let Ok(t) = third_try {
return Ok(DataSource::DB(t));
}
}
Err(serde::de::Error::custom("failed to deserialize DataSource"))
}
} |
#[derive(Clone, Debug, Deserialize, PartialEq)]
pub struct GraphData {
pub input_data: DataSource,
pub output_data: Option<DataSource>,
}
impl UnwindSafe for GraphData {}
impl GraphData {
pub fn to_tract_data(
&self,
shapes: &[Vec<usize>],
datum_types: &[tract_onnx::prelude::DatumType],
) -> Result<TVec<TValue>, Box<dyn std::error::Error>> {
let mut inputs = TVec::new();
match &self.input_data {
DataSource::File(data) => {
for (i, input) in data.iter().enumerate() {
if !input.is_empty() {
let dt = datum_types[i];
let input = input.iter().map(|e| e.to_float()).collect::<Vec<f64>>();
let tt = TractTensor::from_shape(&shapes[i], &input)?;
let tt = tt.cast_to_dt(dt)?;
inputs.push(tt.into_owned().into());
}
}
}
_ => {
return Err(Box::new(GraphError::InvalidDims(
0,
"non file data cannot be split into batches".to_string(),
)))
}
}
Ok(inputs)
}
pub fn new(input_data: DataSource) -> Self {
GraphData {
input_data,
output_data: None,
}
}
pub fn from_path(path: std::path::PathBuf) -> Result<Self, Box<dyn std::error::Error>> {
let reader = std::fs::File::open(path)?;
let mut reader = BufReader::with_capacity(*EZKL_BUF_CAPACITY, reader);
let mut buf = String::new();
reader.read_to_string(&mut buf)?;
let graph_input = serde_json::from_str(&buf)?;
Ok(graph_input)
}
pub fn save(&self, path: std::path::PathBuf) -> Result<(), Box<dyn std::error::Error>> {
let writer = BufWriter::with_capacity(*EZKL_BUF_CAPACITY, std::fs::File::create(path)?);
serde_json::to_writer(writer, self)?;
Ok(())
}
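/// Splits file- or DB-backed input data into batches matching the model's input
/// shapes; on-chain data cannot be batched.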
pub fn split_into_batches(
&self,
input_shapes: Vec<Vec<usize>>,
) -> Result<Vec<Self>, Box<dyn std::error::Error>> {
let mut batched_inputs = vec![];
let iterable = match self {
GraphData {
input_data: DataSource::File(data),
output_data: _,
} => data.clone(),
GraphData {
input_data: DataSource::OnChain(_),
output_data: _,
} => {
return Err(Box::new(GraphError::InvalidDims(
0,
"on-chain data cannot be split into batches".to_string(),
)))
}
GraphData {
input_data: DataSource::DB(data),
output_data: _,
} => data.fetch_and_format_as_file()?,
};
for (i, shape) in input_shapes.iter().enumerate() {
let input_size = shape.clone().iter().product::<usize>();
let input = &iterable[i];
if input.len() % input_size != 0 {
return Err(Box::new(GraphError::InvalidDims(
0,
"calibration data length must be evenly divisible by the original input_size"
.to_string(),
)));
}
let mut batches = vec![];
for batch in input.chunks(input_size) {
batches.push(batch.to_vec());
}
batched_inputs.push(batches);
}
let num_batches = if batched_inputs.is_empty() {
0
} else {
let num_batches = batched_inputs[0].len();
for input in batched_inputs.iter() {
assert_eq!(input.len(), num_batches);
}
num_batches
};
let mut input_batches = vec![];
for i in 0..num_batches {
let mut batch = vec![];
for input in batched_inputs.iter() {
batch.push(input[i].clone());
}
input_batches.push(DataSource::File(batch));
}
if input_batches.is_empty() {
input_batches.push(DataSource::File(vec![vec![]]));
}
let batches = input_batches
.into_iter()
.map(GraphData::new)
.collect::<Vec<GraphData>>();
Ok(batches)
}
}
impl ToPyObject for CallsToAccount {
fn to_object(&self, py: Python) -> PyObject {
let dict = PyDict::new(py);
dict.set_item("account", &self.address).unwrap();
dict.set_item("call_data", &self.call_data).unwrap();
dict.to_object(py)
}
}
impl ToPyObject for DataSource {
fn to_object(&self, py: Python) -> PyObject {
match self {
DataSource::File(data) => data.to_object(py),
DataSource::OnChain(source) => {
let dict = PyDict::new(py);
dict.set_item("rpc_url", &source.rpc).unwrap();
dict.set_item("calls_to_accounts", &source.calls).unwrap();
dict.to_object(py)
}
DataSource::DB(source) => {
let dict = PyDict::new(py);
dict.set_item("host", &source.host).unwrap();
dict.set_item("user", &source.user).unwrap();
dict.set_item("query", &source.query).unwrap();
dict.to_object(py)
}
}
}
}
use crate::pfsys::field_to_string;
impl ToPyObject for FileSourceInner {
fn to_object(&self, py: Python) -> PyObject {
match self {
FileSourceInner::Field(data) => field_to_string(data).to_object(py),
FileSourceInner::Bool(data) => data.to_object(py),
FileSourceInner::Float(data) => data.to_object(py),
}
}
}
impl Serialize for GraphData {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut state = serializer.serialize_struct("GraphData", 2)?;
state.serialize_field("input_data", &self.input_data)?;
state.serialize_field("output_data", &self.output_data)?;
state.end()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_data_source_serialization_round_trip() {
let source = DataSource::from(vec![vec![0.053_262_424, 0.074_970_566, 0.052_355_476]]);
let serialized = serde_json::to_string(&source).unwrap();
const JSON: &str = r#"[[0.053262424,0.074970566,0.052355476]]"#;
assert_eq!(serialized, JSON);
let expect = serde_json::from_str::<DataSource>(JSON)
.map_err(|e| e.to_string())
.unwrap();
assert_eq!(expect, source);
}
#[test]
fn test_graph_input_serialization_round_trip() {
let file = GraphData::new(DataSource::from(vec![vec![
0.05326242372393608,
0.07497056573629379,
0.05235547572374344,
]]));
let serialized = serde_json::to_string(&file).unwrap();
const JSON: &str = r#"{"input_data":[[0.05326242372393608,0.07497056573629379,0.05235547572374344]],"output_data":null}"#;
assert_eq!(serialized, JSON);
let graph_input3 = serde_json::from_str::<GraphData>(JSON)
.map_err(|e| e.to_string())
.unwrap();
assert_eq!(graph_input3, file);
}
#[test]
fn test_python_compat() {
let source = Fp::from_raw([18445520602771460712, 838677322461845011, 3079992810, 0]);
let original_addr = "0x000000000000000000000000b794f5ea0ba39494ce839613fffba74279579268";
assert_eq!(format!("{:?}", source), original_addr);
}
}
pub mod input;
pub mod model;
pub mod modules;
pub mod node;
pub mod utilities;
pub mod vars;
use colored_json::ToColoredJson;
use gag::Gag;
use halo2_proofs::plonk::VerifyingKey;
use halo2_proofs::poly::commitment::CommitmentScheme;
pub use input::DataSource;
use itertools::Itertools;
use tosubcommand::ToFlags;
use self::input::OnChainSource;
use self::input::{FileSource, GraphData};
use self::modules::{GraphModules, ModuleConfigs, ModuleForwardResult, ModuleSizes};
use crate::circuit::lookup::LookupOp;
use crate::circuit::modules::ModulePlanner;
use crate::circuit::region::ConstantsMap;
use crate::circuit::table::{num_cols_required, Range, Table, RESERVED_BLINDING_ROWS_PAD};
use crate::circuit::{CheckMode, InputType};
use crate::fieldutils::felt_to_f64;
use crate::pfsys::PrettyElements;
use crate::tensor::{Tensor, ValTensor};
use crate::{RunArgs, EZKL_BUF_CAPACITY};
use halo2_proofs::{
circuit::Layouter,
plonk::{Circuit, ConstraintSystem, Error as PlonkError},
};
use halo2curves::bn256::{self, Fr as Fp, G1Affine};
use halo2curves::ff::{Field, PrimeField};
use lazy_static::lazy_static;
use log::{debug, error, trace, warn};
use maybe_rayon::prelude::{IntoParallelRefIterator, ParallelIterator};
pub use model::*;
pub use node::*;
use pyo3::prelude::*;
use pyo3::types::PyDict;
use pyo3::ToPyObject;
use serde::{Deserialize, Serialize};
use std::ops::Deref;
use thiserror::Error;
pub use utilities::*;
pub use vars::*;
use crate::pfsys::field_to_string;
pub const RANGE_MULTIPLIER: i128 = 2;
pub const MAX_NUM_LOOKUP_COLS: usize = 12;
pub const MAX_LOOKUP_ABS: i128 = (MAX_NUM_LOOKUP_COLS as i128) * 2_i128.pow(MAX_PUBLIC_SRS);
#[cfg(not(target_arch = "wasm32"))]
lazy_static! {
pub static ref EZKL_MAX_CIRCUIT_AREA: Option<usize> =
if let Ok(max_circuit_area) = std::env::var("EZKL_MAX_CIRCUIT_AREA") {
Some(max_circuit_area.parse().unwrap_or(0))
} else {
None
};
}
#[cfg(target_arch = "wasm32")]
const EZKL_MAX_CIRCUIT_AREA: Option<usize> = None;
#[derive(Debug, Error)]
pub enum GraphError {
// NOTE: the #[error] display strings below are representative reconstructions.
#[error("invalid inputs for a lookup node")]
InvalidLookupInputs,
#[error("invalid dimensions used for node {0} ({1})")]
InvalidDims(usize, String),
#[error("wrong method was called to configure node {0} ({1})")]
WrongMethod(usize, String),
#[error("a requested node is missing in the graph: {0}")]
MissingNode(usize),
#[error("op mismatch in node {0} ({1})")]
OpMismatch(usize, String),
#[error("unsupported operation in graph")]
UnsupportedOp,
#[error("unsupported data type in graph")]
UnsupportedDataType,
#[error("a node is missing required params: {0}")]
MissingParams(String),
#[error("a node has misformed params: {0}")]
MisformedParams(String),
#[error("invalid visibility configuration")]
Visibility,
#[error("ezkl only supports division by constants")]
NonConstantDiv,
#[error("ezkl only supports constant exponents")]
NonConstantPower,
#[error("failed to rescale inputs for {0}")]
RescalingError(String),
#[error("failed to load model")]
ModelLoad,
#[error("packing exponent is too large")]
PackingExponent,
#[error("invalid input types")]
InvalidInputTypes,
#[error("missing results")]
MissingResults,
}
pub const ASSUMED_BLINDING_FACTORS: usize = 5;
pub const MIN_LOGROWS: u32 = 6;
pub const MAX_PUBLIC_SRS: u32 = bn256::Fr::S - 2;
pub const RESERVED_BLINDING_ROWS: usize = ASSUMED_BLINDING_FACTORS + RESERVED_BLINDING_ROWS_PAD;
use std::cell::RefCell;
thread_local!(
pub static GLOBAL_SETTINGS: RefCell<Option<GraphSettings>> = const { RefCell::new(None) }
);
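/// The result of a forward pass over the graph: quantized inputs and outputs,
/// processed module results (hashes/commitments), and the observed lookup and
/// range-check bounds.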
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct GraphWitness {
pub inputs: Vec<Vec<Fp>>,
pub pretty_elements: Option<PrettyElements>,
pub outputs: Vec<Vec<Fp>>,
pub processed_inputs: Option<ModuleForwardResult>,
pub processed_params: Option<ModuleForwardResult>,
pub processed_outputs: Option<ModuleForwardResult>,
pub max_lookup_inputs: i128,
pub min_lookup_inputs: i128,
pub max_range_size: i128,
}
impl GraphWitness {
pub fn get_float_outputs(&self, scales: &[crate::Scale]) -> Vec<Tensor<f32>> {
self.outputs
.iter()
.enumerate()
.map(|(i, x)| {
x.iter()
.map(|y| (felt_to_f64(*y) / scale_to_multiplier(scales[i])) as f32)
.collect::<Tensor<f32>>()
})
.collect()
}
pub fn new(inputs: Vec<Vec<Fp>>, outputs: Vec<Vec<Fp>>) -> Self {
GraphWitness {
inputs,
outputs,
pretty_elements: None,
processed_inputs: None,
processed_params: None,
processed_outputs: None,
max_lookup_inputs: 0,
min_lookup_inputs: 0,
max_range_size: 0,
}
}
pub fn generate_rescaled_elements(
&mut self,
input_scales: Vec<crate::Scale>,
output_scales: Vec<crate::Scale>,
visibility: VarVisibility,
) {
let mut pretty_elements = PrettyElements {
rescaled_inputs: self
.inputs
.iter()
.enumerate()
.map(|(i, t)| {
let scale = input_scales[i];
t.iter()
.map(|x| dequantize(*x, scale, 0.).to_string())
.collect()
})
.collect(),
inputs: self
.inputs
.iter()
.map(|t| t.iter().map(|x| format!("{:?}", x)).collect())
.collect(),
rescaled_outputs: self
.outputs
.iter()
.enumerate()
.map(|(i, t)| {
let scale = output_scales[i];
t.iter()
.map(|x| dequantize(*x, scale, 0.).to_string())
.collect()
})
.collect(),
outputs: self
.outputs
.iter()
.map(|t| t.iter().map(|x| format!("{:?}", x)).collect())
.collect(),
..Default::default()
};
if let Some(processed_inputs) = self.processed_inputs.clone() {
pretty_elements.processed_inputs = processed_inputs
.get_result(visibility.input)
.iter()
.map(|x| x.iter().map(|y| format!("{:?}", y)).collect())
.collect();
}
if let Some(processed_params) = self.processed_params.clone() {
pretty_elements.processed_params = processed_params
.get_result(visibility.params)
.iter()
.map(|x| x.iter().map(|y| format!("{:?}", y)).collect())
.collect();
}
if let Some(processed_outputs) = self.processed_outputs.clone() {
pretty_elements.processed_outputs = processed_outputs
.get_result(visibility.output)
.iter()
.map(|x| x.iter().map(|y| format!("{:?}", y)).collect())
.collect();
}
self.pretty_elements = Some(pretty_elements);
}
pub fn get_polycommitments(&self) -> Vec<G1Affine> {
let mut commitments = vec![];
if let Some(processed_inputs) = &self.processed_inputs {
if let Some(commits) = &processed_inputs.polycommit {
commitments.extend(commits.iter().flatten());
}
}
if let Some(processed_params) = &self.processed_params {
if let Some(commits) = &processed_params.polycommit {
commitments.extend(commits.iter().flatten());
}
}
if let Some(processed_outputs) = &self.processed_outputs {
if let Some(commits) = &processed_outputs.polycommit {
commitments.extend(commits.iter().flatten());
}
}
commitments
}
pub fn as_json(&self) -> Result<String, Box<dyn std::error::Error>> {
serde_json::to_string(&self).map_err(|e| e.into())
}
pub fn from_path(path: std::path::PathBuf) -> Result<Self, Box<dyn std::error::Error>> {
let file = std::fs::File::open(path.clone())
.map_err(|_| format!("failed to load {}", path.display()))?;
let reader = std::io::BufReader::with_capacity(*EZKL_BUF_CAPACITY, file);
serde_json::from_reader(reader).map_err(|e| e.into())
}
pub fn save(&self, path: std::path::PathBuf) -> Result<(), Box<dyn std::error::Error>> {
let writer =
std::io::BufWriter::with_capacity(*EZKL_BUF_CAPACITY, std::fs::File::create(path)?);
serde_json::to_writer(writer, &self).map_err(|e| e.into())
}
pub fn get_input_tensor(&self) -> Vec<Tensor<Fp>> {
self.inputs
.clone()
.into_iter()
.map(|i| Tensor::from(i.into_iter()))
.collect::<Vec<Tensor<Fp>>>()
}
pub fn get_output_tensor(&self) -> Vec<Tensor<Fp>> {
self.outputs
.clone()
.into_iter()
.map(|i| Tensor::from(i.into_iter()))
.collect::<Vec<Tensor<Fp>>>()
}
}
impl ToPyObject for GraphWitness {
fn to_object(&self, py: Python) -> PyObject {
let dict = PyDict::new(py);
let dict_inputs = PyDict::new(py);
let dict_params = PyDict::new(py);
let dict_outputs = PyDict::new(py);
let inputs: Vec<Vec<String>> = self
.inputs
.iter()
.map(|x| x.iter().map(field_to_string).collect())
.collect();
let outputs: Vec<Vec<String>> = self
.outputs
.iter()
.map(|x| x.iter().map(field_to_string).collect())
.collect();
dict.set_item("inputs", inputs).unwrap();
dict.set_item("outputs", outputs).unwrap();
dict.set_item("max_lookup_inputs", self.max_lookup_inputs)
.unwrap();
dict.set_item("min_lookup_inputs", self.min_lookup_inputs)
.unwrap();
dict.set_item("max_range_size", self.max_range_size)
.unwrap();
if let Some(processed_inputs) = &self.processed_inputs {
if let Some(processed_inputs_poseidon_hash) = &processed_inputs.poseidon_hash {
insert_poseidon_hash_pydict(dict_inputs, processed_inputs_poseidon_hash).unwrap();
}
if let Some(processed_inputs_polycommit) = &processed_inputs.polycommit {
insert_polycommit_pydict(dict_inputs, processed_inputs_polycommit).unwrap();
}
dict.set_item("processed_inputs", dic |
t_inputs).unwrap();
}
if let Some(processed_params) = &self.processed_params {
if let Some(processed_params_poseidon_hash) = &processed_params.poseidon_hash {
insert_poseidon_hash_pydict(dict_params, processed_params_poseidon_hash).unwrap();
}
if let Some(processed_params_polycommit) = &processed_params.polycommit {
insert_polycommit_pydict(dict_params, processed_params_polycommit).unwrap();
}
dict.set_item("processed_params", dict_params).unwrap();
}
if let Some(processed_outputs) = &self.processed_outputs {
if let Some(processed_outputs_poseidon_hash) = &processed_outputs.poseidon_hash {
insert_poseidon_hash_pydict(dict_outputs, processed_outputs_poseidon_hash).unwrap();
}
if let Some(processed_outputs_polycommit) = &processed_outputs.polycommit {
insert_polycommit_pydict(dict_outputs, processed_outputs_polycommit).unwrap();
}
dict.set_item("processed_outputs", dict_outputs).unwrap();
}
dict.to_object(py)
}
}
fn insert_poseidon_hash_pydict(pydict: &PyDict, poseidon_hash: &Vec<Fp>) -> Result<(), PyErr> {
let poseidon_hash: Vec<String> = poseidon_hash.iter().map(field_to_string).collect();
pydict.set_item("poseidon_hash", poseidon_hash)?;
Ok(())
}
fn insert_polycommit_pydict(pydict: &PyDict, commits: &[Vec<G1Affine>]) -> Result<(), PyErr> {
use crate::python::PyG1Affine;
let commits: Vec<Vec<PyG1Affine>> = commits
.iter()
.map(|c| c.iter().map(|x| PyG1Affine::from(*x)).collect())
.collect();
pydict.set_item("polycommit", commits)?;
Ok(())
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct GraphSettings {
pub run_args: RunArgs,
pub num_rows: usize,
pub total_assignments: usize,
pub total_const_size: usize,
pub total_dynamic_col_size: usize,
pub num_dynamic_lookups: usize,
pub num_shuffles: usize,
pub total_shuffle_col_size: usize,
pub model_instance_shapes: Vec<Vec<usize>>,
pub model_output_scales: Vec<crate::Scale>,
pub model_input_scales: Vec<crate::Scale>,
pub module_sizes: ModuleSizes,
pub required_lookups: Vec<LookupOp>,
pub required_range_checks: Vec<Range>,
pub check_mode: CheckMode,
pub version: String,
pub num_blinding_factors: Option<usize>,
pub timestamp: Option<u128>,
}
impl GraphSettings {
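/// Log2 of the number of rows needed to hold the lookup table:
/// `ceil(log2(range_end - range_start))`. For example, a lookup range of
/// (-4096, 4096) spans 8192 values and needs 13 logrows.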
pub fn lookup_log_rows(&self) -> u32 {
((self.run_args.lookup_range.1 - self.run_args.lookup_range.0) as f32)
.log2()
.ceil() as u32
}
pub fn lookup_log_rows_with_blinding(&self) -> u32 {
((self.run_args.lookup_range.1 - self.run_args.lookup_range.0) as f32
+ RESERVED_BLINDING_ROWS as f32)
.log2()
.ceil() as u32
}
fn model_constraint_logrows_with_blinding(&self) -> u32 {
(self.num_rows as f64 + RESERVED_BLINDING_ROWS as f64)
.log2()
.ceil() as u32
}
fn dynamic_lookup_and_shuffle_logrows(&self) -> u32 {
(self.total_dynamic_col_size as f64 + self.total_shuffle_col_size as f64)
.log2()
.ceil() as u32
}
pub fn dynamic_lookup_and_shuffle_logrows_with_blinding(&self) -> u32 {
(self.total_dynamic_col_size as f64
+ self.total_shuffle_col_size as f64
+ RESERVED_BLINDING_ROWS as f64)
.log2()
.ceil() as u32
}
fn dynamic_lookup_and_shuffle_col_size(&self) -> usize {
self.total_dynamic_col_size + self.total_shuffle_col_size
}
pub fn module_constraint_logrows(&self) -> u32 {
(self.module_sizes.max_constraints() as f64).log2().ceil() as u32
}
pub fn module_constraint_logrows_with_blinding(&self) -> u32 {
(self.module_sizes.max_constraints() as f64 + RESERVED_BLINDING_ROWS as f64)
.log2()
.ceil() as u32
}
fn constants_logrows(&self) -> u32 {
(self.total_const_size as f64 / self.run_args.num_inner_cols as f64)
.log2()
.ceil() as u32
}
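/// All instance column lengths, in order: one entry per model instance (the
/// product of its shape), followed by the instance counts of the attached
/// modules.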
pub fn total_instances(&self) -> Vec<usize> {
let mut instances: Vec<usize> = self
.model_instance_shapes
.iter()
.map(|x| x.iter().product())
.collect();
instances.extend(self.module_sizes.num_instances());
instances
}
pub fn log2_total_instances(&self) -> u32 {
let sum = self.total_instances().iter().sum::<usize>();
std::cmp::max((sum as f64).log2().ceil() as u32, 1)
}
pub fn log2_total_instances_with_blinding(&self) -> u32 {
let sum = self.total_instances().iter().sum::<usize>() + RESERVED_BLINDING_ROWS;
std::cmp::max((sum as f64).log2().ceil() as u32, 1)
}
pub fn save(&self, path: &std::path::PathBuf) -> Result<(), std::io::Error> {
let writer =
std::io::BufWriter::with_capacity(*EZKL_BUF_CAPACITY, std::fs::File::create(path)?);
serde_json::to_writer(writer, &self).map_err(|e| {
error!("failed to save settings file at {}", e);
std::io::Error::new(std::io::ErrorKind::Other, e)
})
}
pub fn load(path: &std::path::PathBuf) -> Result<Self, std::io::Error> {
let reader =
std::io::BufReader::with_capacity(*EZKL_BUF_CAPACITY, std::fs::File::open(path)?);
serde_json::from_reader(reader).map_err(|e| {
error!("failed to load settings file at {}", e);
std::io::Error::new(std::io::ErrorKind::Other, e)
})
}
pub fn as_json(&self) -> Result<String, Box<dyn std::error::Error>> {
Ok(serde_json::to_string(&self)?)
}
pub fn from_json(arg_json: &str) -> Result<Self, serde_json::Error> {
serde_json::from_str(arg_json)
}
fn set_num_blinding_factors(&mut self, num_blinding_factors: usize) {
self.num_blinding_factors = Some(num_blinding_factors);
}
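/// Usable rows per column: `2^logrows` minus the blinding rows and one row
/// reserved by halo2. Falls back to `ASSUMED_BLINDING_FACTORS` if the exact
/// blinding count has not been recorded yet.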
pub fn available_col_size(&self) -> usize {
let base = 2u32;
if let Some(num_blinding_factors) = self.num_blinding_factors {
base.pow(self.run_args.logrows) as usize - num_blinding_factors - 1
} else {
log::error!("num_blinding_factors not set");
log::warn!("using default available_col_size");
base.pow(self.run_args.logrows) as usize - ASSUMED_BLINDING_FACTORS - 1
}
}
pub fn uses_modules(&self) -> bool {
self.module_sizes.max_constraints() > 0
}
pub fn module_requires_fixed(&self) -> bool {
self.run_args.input_visibility.is_hashed()
|| self.run_args.output_visibility.is_hashed()
|| self.run_args.param_visibility.is_hashed()
}
pub fn requires_dynamic_lookup(&self) -> bool {
self.num_dynamic_lookups > 0
}
pub fn requires_shuffle(&self) -> bool {
self.num_shuffles > 0
}
pub fn module_requires_polycommit(&self) -> bool {
self.run_args.input_visibility.is_polycommit()
|| self.run_args.output_visibility.is_polycommit()
|| self.run_args.param_visibility.is_polycommit()
}
}
#[derive(Clone)]
pub struct GraphConfig {
model_config: ModelConfig,
module_configs: ModuleConfigs,
circuit_size: CircuitSize,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CoreCircuit {
pub model: Model,
pub settings: GraphSettings,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct GraphCircuit {
pub core: CoreCircuit,
pub graph_witness: GraphWitness,
}
impl GraphCircuit {
pub fn settings(&self) -> &GraphSettings {
&self.core.settings
}
pub fn settings_mut(&mut self) -> &mut GraphSettings {
&mut self.core.settings
}
pub fn model(&self) -> &Model {
&self.core.model
}
pub fn save(&self, path: std::path::PathBuf) -> Result<(), Box<dyn std::error::Error>> {
let f = std::fs::File::create(path)?;
let writer = std::io::BufWriter::with_capacity(*EZKL_BUF_CAPACITY, f);
bincode::serialize_into(writer, &self)?;
Ok(())
}
pub fn load(path: std::path::PathBuf) -> Result<Self, Box<dyn std::error::Error>> {
let f = std::fs::File::open(path)?;
let reader = std::io::BufReader::with_capacity(*EZKL_BUF_CAPACITY, f);
let result: GraphCircuit = bincode::deserialize_from(reader)?;
Ok(result)
}
}
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub enum TestDataSource {
File,
#[default]
OnChain,
}
impl std::fmt::Display for TestDataSource {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
TestDataSource::File => write!(f, "file"),
TestDataSource::OnChain => write!(f, "on-chain"),
}
}
}
impl ToFlags for TestDataSource {}
impl From<String> for TestDataSource {
fn from(value: String) -> Self {
match value.to_lowercase().as_str() {
"file" => TestDataSource::File,
"on-chain" => TestDataSource::OnChain,
_ => {
error!("invalid data source: {}", value);
warn!("using default data source: on-chain");
TestDataSource::default()
}
}
}
}
pub struct TestSources {
pub input: TestDataSource,
pub output: TestDataSource,
}
pub struct TestOnChainData {
pub data: std::path::PathBuf,
pub rpc: Option<String>,
pub data_sources: TestSources,
}
impl GraphCircuit {
pub fn new(
model: Model,
run_args: &RunArgs,
) -> Result<GraphCircuit, Box<dyn std::error::Error>> {
let mut inputs: Vec<Vec<Fp>> = vec![];
for shape in model.graph.input_shapes()? {
let t: Vec<Fp> = vec![Fp::zero(); shape.iter().product::<usize>()];
inputs.push(t);
}
let mut settings = model.gen_params(run_args, run_args.check_mode)?;
let mut num_params = 0;
for shape in model.const_shapes() {
num_params += shape.iter().product::<usize>();
}
let sizes = GraphModules::num_constraints_and_instances(
model.graph.input_shapes()?,
vec![vec![num_params]],
model.graph.output_shapes()?,
VarVisibility::from_args(run_args)?,
);
settings.module_sizes = sizes.clone();
settings.num_rows = std::cmp::max(settings.num_rows, sizes.max_constraints());
let core = CoreCircuit {
model,
settings: settings.clone(),
};
Ok(GraphCircuit {
core,
graph_witness: GraphWitness::new(inputs, vec![]),
})
}
pub fn new_from_settings(
model: Model,
mut settings: GraphSettings,
check_mode: CheckMode,
) -> Result<GraphCircuit, Box<dyn std::error::Error>> {
let mut inputs: Vec<Vec<Fp>> = vec![];
for shape in model.graph.input_shapes()? {
let t: Vec<Fp> = vec![Fp::zero(); shape.iter().product::<usize>()];
inputs.push(t);
}
settings.check_mode = check_mode;
let core = CoreCircuit {
model,
settings: settings.clone(),
};
Ok(GraphCircuit {
core,
graph_witness: GraphWitness::new(inputs, vec![]),
})
}
pub fn load_graph_witness(
&mut self,
data: &GraphWitness,
) -> Result<(), Box<dyn std::error::Error>> {
self.graph_witness = data.clone();
Ok(())
}
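/// Flatten everything destined for the instance column, in circuit order:
/// public (or processed) inputs, processed params, then public (or
/// processed) outputs.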
pub fn prepare_public_inputs(
&self,
data: &GraphWitness,
) -> Result<Vec<Fp>, Box<dyn std::error::Error>> {
let mut public_inputs: Vec<Fp> = vec![];
if self.settings().run_args.input_visibility.is_public() {
public_inputs.extend(self.graph_witness.inputs.clone().into_iter().flatten())
} else if let Some(processed_inputs) = &data.processed_inputs {
public_inputs.extend(processed_inputs.get_instances().into_iter().flatten());
}
if let Some(processed_params) = &data.processed_params {
public_inputs.extend(processed_params.get_instances().into_iter().flatten());
}
if self.settings().run_args.output_visibility.is_public() {
public_inputs.extend(self.graph_witness.outputs.clone().into_iter().flatten());
} else if let Some(processed_outputs) = &data.processed_outputs {
public_inputs.extend(processed_outputs.get_instances().into_iter().flatten());
}
if public_inputs.len() < 11 {
debug!("public inputs: {:?}", public_inputs);
} else {
debug!("public inputs: {:?} ...", &public_inputs[0..10]);
}
Ok(public_inputs)
}
pub fn pretty_public_inputs(
&self,
data: &GraphWitness,
) -> Result<Option<PrettyElements>, Box<dyn std::error::Error>> {
if data.pretty_elements.is_none() {
warn!("no rescaled elements found in witness data");
return Ok(None);
}
let mut public_inputs = PrettyElements::default();
let elements = data.pretty_elements.as_ref().unwrap();
if self.settings().run_args.input_visibility.is_public() {
public_inputs.rescaled_inputs = elements.rescaled_inputs.clone();
public_inputs.inputs = elements.inputs.clone();
} else if data.processed_inputs.is_some() {
public_inputs.processed_inputs = elements.processed_inputs.clone();
}
if data.processed_params.is_some() {
public_inputs.processed_params = elements.processed_params.clone();
}
if self.settings().run_args.output_visibility.is_public() {
public_inputs.rescaled_outputs = elements.rescaled_outputs.clone();
public_inputs.outputs = elements.outputs.clone();
} else if data.processed_outputs.is_some() {
public_inputs.processed_outputs = elements.processed_outputs.clone();
}
debug!(
"rescaled and processed public inputs: {}",
serde_json::to_string(&public_inputs)?.to_colored_json_auto()?
);
Ok(Some(public_inputs))
}
pub fn load_graph_from_file_exclusively(
&mut self,
data: &GraphData,
) -> Result<Vec<Tensor<Fp>>, Box<dyn std::error::Error>> {
let shapes = self.model().graph.input_shapes()?;
let scales = self.model().graph.get_input_scales();
let input_types = self.model().graph.get_input_types()?;
debug!("input scales: {:?}", scales);
match &data.input_data {
DataSource::File(file_data) => {
self.load_file_data(file_data, &shapes, scales, input_types)
}
_ => Err("Cannot use non-file data source as input |
for this method.".into()),
}
}
pub fn load_graph_input(
&mut self,
data: &GraphData,
) -> Result<Vec<Tensor<Fp>>, Box<dyn std::error::Error>> {
let shapes = self.model().graph.input_shapes()?;
let scales = self.model().graph.get_input_scales();
let input_types = self.model().graph.get_input_types()?;
debug!("input scales: {:?}", scales);
self.process_data_source(&data.input_data, shapes, scales, input_types)
}
fn process_data_source(
&mut self,
data: &DataSource,
shapes: Vec<Vec<usize>>,
scales: Vec<crate::Scale>,
input_types: Vec<InputType>,
) -> Result<Vec<Tensor<Fp>>, Box<dyn std::error::Error>> {
match &data {
DataSource::OnChain(source) => {
let mut per_item_scale = vec![];
for (i, shape) in shapes.iter().enumerate() {
per_item_scale.extend(vec![scales[i]; shape.iter().product::<usize>()]);
}
let runtime = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()?;
runtime.block_on(async {
self.load_on_chain_data(source.clone(), &shapes, per_item_scale)
.await
})
}
DataSource::File(file_data) => {
self.load_file_data(file_data, &shapes, scales, input_types)
}
DataSource::DB(pg) => {
let data = pg.fetch_and_format_as_file()?;
self.load_file_data(&data, &shapes, scales, input_types)
}
}
}
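/// Read input values from on-chain calls, quantize them via the EVM helper
/// at the model's input scales, and reshape them into input tensors.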
pub async fn load_on_chain_data(
&mut self,
source: OnChainSource,
shapes: &Vec<Vec<usize>>,
scales: Vec<crate::Scale>,
) -> Result<Vec<Tensor<Fp>>, Box<dyn std::error::Error>> {
use crate::eth::{evm_quantize, read_on_chain_inputs, setup_eth_backend};
let (_, client) = setup_eth_backend(Some(&source.rpc), None).await?;
let inputs = read_on_chain_inputs(client.clone(), client.address(), &source.calls).await?;
let quantized_evm_inputs = evm_quantize(client, scales, &inputs).await?;
let mut inputs: Vec<Tensor<Fp>> = vec![];
for (input, shape) in [quantized_evm_inputs].iter().zip(shapes) {
let mut t: Tensor<Fp> = input.iter().cloned().collect();
t.reshape(shape)?;
inputs.push(t);
}
Ok(inputs)
}
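/// Quantize file inputs into field elements: each value is cast to its
/// node's input type and encoded at the model's fixed-point scale before
/// being reshaped.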
pub fn load_file_data(
&mut self,
file_data: &FileSource,
shapes: &Vec<Vec<usize>>,
scales: Vec<crate::Scale>,
input_types: Vec<InputType>,
) -> Result<Vec<Tensor<Fp>>, Box<dyn std::error::Error>> {
let mut data: Vec<Tensor<Fp>> = vec![];
for (((d, shape), scale), input_type) in file_data
.iter()
.zip(shapes)
.zip(scales)
.zip(input_types.iter())
{
let t: Vec<Fp> = d
.par_iter()
.map(|x| {
let mut x = x.clone();
x.as_type(input_type);
x.to_field(scale)
})
.collect();
let mut t: Tensor<Fp> = t.into_iter().into();
t.reshape(shape)?;
data.push(t);
}
Ok(data)
}
pub fn load_witness_file_data(
&mut self,
file_data: &[Vec<Fp>],
shapes: &[Vec<usize>],
) -> Result<Vec<Tensor<Fp>>, Box<dyn std::error::Error>> {
let mut data: Vec<Tensor<Fp>> = vec![];
for (d, shape) in file_data.iter().zip(shapes) {
let mut t: Tensor<Fp> = d.clone().into_iter().into();
t.reshape(shape)?;
data.push(t);
}
Ok(data)
}
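/// Widen the observed lookup bounds by the safety margin so the table also
/// covers values a slightly different input could produce. For example, a
/// margin of 2 turns an observed range of (-100, 100) into (-200, 200).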
fn calc_safe_lookup_range(min_max_lookup: Range, lookup_safety_margin: i128) -> Range {
(
lookup_safety_margin * min_max_lookup.0,
lookup_safety_margin * min_max_lookup.1,
)
}
fn calc_num_cols(range_len: i128, max_logrows: u32) -> usize {
let max_col_size = Table::<Fp>::cal_col_size(max_logrows as usize, RESERVED_BLINDING_ROWS);
num_cols_required(range_len, max_col_size)
}
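/// Minimum logrows that fit the larger of the lookup span and the
/// range-check span, plus blinding rows, in a single table column.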
fn table_size_logrows(
&self,
safe_lookup_range: Range,
max_range_size: i128,
) -> Result<u32, Box<dyn std::error::Error>> {
let safe_range = std::cmp::max(
(safe_lookup_range.1 - safe_lookup_range.0).abs(),
max_range_size,
);
let min_bits = (safe_range as f64 + RESERVED_BLINDING_ROWS as f64 + 1.)
.log2()
.ceil() as u32;
Ok(min_bits)
}
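/// Find the smallest `logrows` that satisfies every size constraint:
/// instance count, module constraints, dynamic lookups and shuffles, table
/// sizes, and the extended-k bound. On success, writes the chosen
/// `lookup_range` and `logrows` back into the settings.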
pub fn calc_min_logrows(
&mut self,
min_max_lookup: Range,
max_range_size: i128,
max_logrows: Option<u32>,
lookup_safety_margin: i128,
) -> Result<(), Box<dyn std::error::Error>> {
let max_logrows = max_logrows.unwrap_or(MAX_PUBLIC_SRS);
let max_logrows = std::cmp::min(max_logrows, MAX_PUBLIC_SRS);
let mut max_logrows = std::cmp::max(max_logrows, MIN_LOGROWS);
let mut min_logrows = MIN_LOGROWS;
let safe_lookup_range = Self::calc_safe_lookup_range(min_max_lookup, lookup_safety_margin);
if (min_max_lookup.1 - min_max_lookup.0).abs() > MAX_LOOKUP_ABS / lookup_safety_margin {
let err_string = format!("max lookup input {:?} is too large", min_max_lookup);
return Err(err_string.into());
}
if max_range_size.abs() > MAX_LOOKUP_ABS {
let err_string = format!("max range check size {:?} is too large", max_range_size);
return Err(err_string.into());
}
let instance_logrows = self.settings().log2_total_instances();
let module_constraint_logrows = self.settings().module_constraint_logrows();
let dynamic_lookup_logrows = self.settings().dynamic_lookup_and_shuffle_logrows();
min_logrows = std::cmp::max(
min_logrows,
*[
instance_logrows,
module_constraint_logrows,
dynamic_lookup_logrows,
]
.iter()
.max()
.unwrap(),
);
let model_constraint_logrows = self.settings().model_constraint_logrows_with_blinding();
let min_bits = self.table_size_logrows(safe_lookup_range, max_range_size)?;
let constants_logrows = self.settings().constants_logrows();
max_logrows = std::cmp::min(
max_logrows,
*[model_constraint_logrows, min_bits, constants_logrows]
.iter()
.max()
.unwrap(),
);
max_logrows = std::cmp::max(min_logrows, max_logrows);
while min_logrows < max_logrows
&& !self.extended_k_is_small_enough(max_logrows, safe_lookup_range, max_range_size)
{
max_logrows -= 1;
}
if !self.extended_k_is_small_enough(max_logrows, safe_lookup_range, max_range_size) {
let err_string = format!(
"extended k is too large to accommodate the quotient polynomial with logrows {}",
max_logrows
);
debug!("{}", err_string);
return Err(err_string.into());
}
let logrows = max_logrows;
let model = self.model().clone();
let settings_mut = self.settings_mut();
settings_mut.run_args.lookup_range = safe_lookup_range;
settings_mut.run_args.logrows = logrows;
*settings_mut = GraphCircuit::new(model, &settings_mut.run_args)?
.settings()
.clone();
debug!(
"setting lookup_range to: {:?}, setting logrows to: {}",
self.settings().run_args.lookup_range,
self.settings().run_args.logrows
);
Ok(())
}
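/// Check that the quotient polynomial's extended domain fits in the field:
/// the smallest `extended_k` with `2^extended_k >= 2^k * (degree - 1)` must
/// not exceed `Fr::S`, and the lookup/range-check columns must stay within
/// `MAX_NUM_LOOKUP_COLS`.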
fn extended_k_is_small_enough(
&self,
k: u32,
safe_lookup_range: Range,
max_range_size: i128,
) -> bool {
if Self::calc_num_cols(safe_lookup_range.1 - safe_lookup_range.0, k) > MAX_NUM_LOOKUP_COLS
|| Self::calc_num_cols(max_range_size, k) > MAX_NUM_LOOKUP_COLS
{
return false;
}
let mut settings = self.settings().clone();
settings.run_args.lookup_range = safe_lookup_range;
settings.run_args.logrows = k;
settings.required_range_checks = vec![(0, max_range_size)];
let mut cs = ConstraintSystem::default();
let _r = match Gag::stdout() {
Ok(g) => Some(g),
_ => None,
};
let _g = match Gag::stderr() {
Ok(g) => Some(g),
_ => None,
};
Self::configure_with_params(&mut cs, settings);
drop(_r);
drop(_g);
let cs = cs.chunk_lookups();
let max_degree = cs.degree();
let quotient_poly_degree = (max_degree - 1) as u64;
let n = 1u64 << k;
let mut extended_k = k;
while (1 << extended_k) < (n * quotient_poly_degree) {
extended_k += 1;
if extended_k > bn256::Fr::S {
return false;
}
}
true
}
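/// Execute a full out-of-circuit forward pass: run the input modules
/// (hash/commit), evaluate the model graph, run the output modules, and
/// collect the outputs plus lookup/range statistics into a `GraphWitness`.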
pub fn forward<Scheme: CommitmentScheme<Scalar = Fp, Curve = G1Affine>>(
&self,
inputs: &mut [Tensor<Fp>],
vk: Option<&VerifyingKey<G1Affine>>,
srs: Option<&Scheme::ParamsProver>,
witness_gen: bool,
) -> Result<GraphWitness, Box<dyn std::error::Error>> {
let original_inputs = inputs.to_vec();
let visibility = VarVisibility::from_args(&self.settings().run_args)?;
let mut processed_inputs = None;
let mut processed_params = None;
let mut processed_outputs = None;
if visibility.input.requires_processing() {
let module_outlets = visibility.input.overwrites_inputs();
if !module_outlets.is_empty() {
let mut module_inputs = vec![];
for outlet in &module_outlets {
module_inputs.push(inputs[*outlet].clone());
}
let res =
GraphModules::forward::<Scheme>(&module_inputs, &visibility.input, vk, srs)?;
processed_inputs = Some(res.clone());
let module_results = res.get_result(visibility.input.clone());
for (i, outlet) in module_outlets.iter().enumerate() {
inputs[*outlet] = Tensor::from(module_results[i].clone().into_iter());
}
} else {
processed_inputs = Some(GraphModules::forward::<Scheme>(
inputs,
&visibility.input,
vk,
srs,
)?);
}
}
if visibility.params.requires_processing() {
let params = self.model().get_all_params();
if !params.is_empty() {
let flattened_params = Tensor::new(Some(¶ms), &[params.len()])?.combine()?;
processed_params = Some(GraphModules::forward::<Scheme>(
&[flattened_params],
&visibility.params,
vk,
srs,
)?);
}
}
let mut model_results =
self.model()
.forward(inputs, &self.settings().run_args, witness_gen)?;
if visibility.output.requires_processing() {
let module_outlets = visibility.output.overwrites_inputs();
if !module_outlets.is_empty() {
let mut module_inputs = vec![];
for outlet in &module_outlets {
module_inputs.push(model_results.outputs[*outlet].clone());
}
let res =
GraphModules::forward::<Scheme>(&module_inputs, &visibility.output, vk, srs)?;
processed_outputs = Some(res.clone());
let module_results = res.get_result(visibility.output.clone());
for (i, outlet) in module_outlets.iter().enumerate() {
model_results.outputs[*outlet] =
Tensor::from(module_results[i].clone().into_iter());
}
} else {
processed_outputs = Some(GraphModules::forward::<Scheme>(
&model_results.outputs,
&visibility.output,
vk,
srs,
)?);
}
}
let mut witness = GraphWitness {
inputs: original_inputs
.iter()
.map(|t| t.deref().to_vec())
.collect_vec(),
pretty_elements: None,
outputs: model_results
.outputs
.iter()
.map(|t| t.deref().to_vec())
.collect_vec(),
processed_inputs,
processed_params,
processed_outputs,
max_lookup_inputs: model_results.max_lookup_inputs,
min_lookup_inputs: model_results.min_lookup_inputs,
max_range_size: model_results.max_range_size,
};
witness.generate_rescaled_elements(
self.model().graph.get_input_scales(),
self.model().graph.get_output_scales()?,
visibility,
);
log::trace!(
"witness: \n {}",
&witness.as_json()?.to_colored_json_auto()?
);
Ok(witness)
}
pub fn from_run_args(
run_args: &RunArgs,
model_path: &std::path::Path,
) -> Result<Self, Box<dyn std::error::Error>> {
let model = Model::from_run_args(run_args, model_path)?;
Self::new(model, run_args)
}
pub fn from_settings(
params: &GraphSettings,
model_path: &std::path::Path,
check_mode: CheckMode,
) -> Result<Self, Box<dyn std::error::Error>> {
params.run_args.validate()?;
let model = Model::from_run_args(¶ms.run_args, model_path)?;
Self::new_from_settings(model, params.clone(), check_mode)
}
pub async fn populate_on_chain_test_data(
&mut self,
data: &mut GraphData,
test_on_chain_data: TestOnChainData,
) -> Result<(), Box<dyn std::error::Error>> {
let input_scales = self.model().graph.get_input_scales();
let output_scales = self.model().graph.get_output_scales()?;
let input_shapes = self.model().graph.input_shapes()?;
let output_shapes = self.model().graph.output_shapes()?;
if matches!(
test_on_chain_data.data_sources.input,
TestDataSource::OnChain
) {
if self.settings().run_args.input_visibility.is_private() {
return Err("Cannot use on-chain data source as private data".into());
}
let input_data = match &data.input_data {
DataSource::File(input_data) => input_data,
_ => {
return Err("Cannot use non file source as input for on-chain test.
Manually populate on-chain data from file source instead"
.into())
}
};
let datum: (Vec<Tensor<Fp>>, OnChainSource) = OnChainSource::test_from_file_data(
input_data,
input_scales,
input_shapes,
test_on_chain_data.rpc.as_deref(),
)
.await?;
data.input_data = datum.1.into();
}
if matches!(
test_on_chain_data.data_sources.output,
TestDataSource::OnChain
) {
if self.settings().run_args.output_visibility.is_private() {
return Err("Cannot use on-chain data source as private data".into());
}
let output_data = match &data.output_data {
Some(DataSource::File(output_data)) => output_data,
Some(DataSource::OnChain(_)) => {
return Err(
"Cannot use an on-chain data source as output for an on-chain test. \
Manually populate on-chain data from a file source instead"
.into(),
)
}
_ => return Err("No output data found".into()),
};
let datum: (Vec<Tensor<Fp>>, OnChainSource) = OnChainSource::test_from_file_data(
output_data,
output_scales,
output_shapes,
test_on_chain_data.rpc.as_deref(),
)
.await?;
data.output_data = Some(datum.1.into());
}
data.save(test_on_chain_data.data)?;
Ok(())
}
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CircuitSize {
num_instances: usize,
num_advice_columns: usize,
num_fixed: usize,
num_challenges: usize,
num_selectors: usize,
logrows: u32,
}
impl CircuitSize {
pub fn from_cs<F: Field>(cs: &ConstraintSystem<F>, logrows: u32) -> Self {
CircuitSize {
num_instances: cs.num_instance_columns(),
num_advice_columns: cs.num_advice_columns(),
num_fixed: cs.num_fixed_columns(),
num_challenges: cs.num_challenges(),
num_selectors: cs.num_selectors(),
logrows,
}
}
pub fn as_json(&self) -> Result<String, Box<dyn std::error::Error>> {
Ok(serde_json::to_string(&self)?)
}
pub fn num_columns(&self) -> usize {
self.num_instances + self.num_advice_columns + self.num_fixed
}
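/// Total assigned cell count: `num_columns * 2^logrows`. For example, 10
/// columns at logrows = 17 cover 10 * 131072 = 1310720 cells.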
pub fn area(&self) -> usize {
self.num_columns() * (1 << self.logrows)
}
pub fn area_less_than_max(&self) -> bool {
if EZKL_MAX_CIRCUIT_AREA.is_some() {
self.area() < EZKL_MAX_CIRCUIT_AREA.unwrap()
} else {
true
}
}
}
impl Circuit<Fp> for GraphCircuit {
type Config = GraphConfig;
type FloorPlanner = ModulePlanner;
type Params = GraphSettings;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn params(&self) -> Self::Params {
self.settings().clone()
}
fn configure_with_params(cs: &mut ConstraintSystem<Fp>, params: Self::Params) -> Self::Config {
let mut params = params.clone();
params.set_num_blinding_factors(cs.blinding_factors());
GLOBAL_SETTINGS.with(|settings| {
*settings.borrow_mut() = Some(params.clone());
});
let visibility = match VarVisibility::from_args(¶ms.run_args) {
Ok(v) => v,
Err(e) => {
log::error!("failed to create visibility: {:?}", e);
log::warn!("using default visibility");
VarVisibility::default()
}
};
let mut module_configs = ModuleConfigs::from_visibility(
cs,
params.module_sizes.clone(),
params.run_args.logrows as usize,
);
let mut vars = ModelVars::new(cs, ¶ms);
module_configs.configure_complex_modules(cs, visibility, params.module_sizes.clone());
vars.instantiate_instance(
cs,
params.model_instance_shapes.clone(),
params.run_args.input_scale,
module_configs.instance,
);
let base = Model::configure(cs, &vars, ¶ms).unwrap();
let model_config = ModelConfig { base, vars };
debug!(
"degree: {}, log2_ceil of degrees: {:?}",
cs.degree(),
(cs.degree() as f32).log2().ceil()
);
let circuit_size = CircuitSize::from_cs(cs, params.run_args.logrows);
debug!(
"circuit size: \n {}",
circuit_size
.as_json()
.unwrap()
.to_colored_json_auto()
.unwrap()
);
GraphConfig {
model_config,
module_configs,
circuit_size,
}
}
fn configure(_: &mut ConstraintSystem<Fp>) -> Self::Config {
unimplemented!("you should call configure_with_params instead")
}
fn synthesize(
&self,
config: Self::Config,
mut layouter: impl Layouter<Fp>,
) -> Result<(), PlonkError> {
if !config.circuit_size.area_less_than_max() {
error!(
"circuit area {} is larger than the max allowed area {}",
config.circuit_size.area(),
EZKL_MAX_CIRCUIT_AREA.unwrap()
);
return Err(PlonkError::Synthesis);
}
trace!("Setting input in synthesize |
");
let input_vis = &self.settings().run_args.input_visibility;
let output_vis = &self.settings().run_args.output_visibility;
let mut graph_modules = GraphModules::new();
let mut constants = ConstantsMap::new();
let mut config = config.clone();
let mut inputs = self
.graph_witness
.get_input_tensor()
.iter_mut()
.map(|i| {
i.set_visibility(input_vis);
ValTensor::try_from(i.clone()).map_err(|e| {
log::error!("failed to convert input to valtensor: {:?}", e);
PlonkError::Synthesis
})
})
.collect::<Result<Vec<ValTensor<Fp>>, PlonkError>>()?;
let outputs = self
.graph_witness
.get_output_tensor()
.iter_mut()
.map(|i| {
i.set_visibility(output_vis);
ValTensor::try_from(i.clone()).map_err(|e| {
log::error!("failed to convert output to valtensor: {:?}", e);
PlonkError::Synthesis
})
})
.collect::<Result<Vec<ValTensor<Fp>>, PlonkError>>()?;
let mut instance_offset = 0;
trace!("running input module layout");
let input_visibility = &self.settings().run_args.input_visibility;
let outlets = input_visibility.overwrites_inputs();
if !outlets.is_empty() {
let mut input_outlets = vec![];
for outlet in &outlets {
input_outlets.push(inputs[*outlet].clone());
}
graph_modules.layout(
&mut layouter,
&mut config.module_configs,
&mut input_outlets,
input_visibility,
&mut instance_offset,
&mut constants,
)?;
for (i, outlet) in outlets.iter().enumerate() {
inputs[*outlet] = input_outlets[i].clone();
}
} else {
graph_modules.layout(
&mut layouter,
&mut config.module_configs,
&mut inputs,
input_visibility,
&mut instance_offset,
&mut constants,
)?;
}
let mut model = self.model().clone();
let param_visibility = &self.settings().run_args.param_visibility;
trace!("running params module layout");
if !self.model().get_all_params().is_empty() && param_visibility.requires_processing() {
let consts = self.model().get_all_params();
let mut flattened_params = {
let mut t = Tensor::new(Some(&consts), &[consts.len()])
.map_err(|_| {
log::error!("failed to flatten params");
PlonkError::Synthesis
})?
.combine()
.map_err(|_| {
log::error!("failed to combine params");
PlonkError::Synthesis
})?;
t.set_visibility(param_visibility);
vec![t.try_into().map_err(|_| {
log::error!("failed to convert params to valtensor");
PlonkError::Synthesis
})?]
};
graph_modules.layout(
&mut layouter,
&mut config.module_configs,
&mut flattened_params,
param_visibility,
&mut instance_offset,
&mut constants,
)?;
let shapes = self.model().const_shapes();
trace!("replacing processed consts");
let split_params = split_valtensor(&flattened_params[0], shapes).map_err(|_| {
log::error!("failed to split params");
PlonkError::Synthesis
})?;
model.replace_consts(&split_params);
}
layouter.assign_region(|| "_enter_module_2", |_| Ok(()))?;
trace!("laying out model");
let mut vars = config.model_config.vars.clone();
vars.set_initial_instance_offset(instance_offset);
let mut outputs = model
.layout(
config.model_config.clone(),
&mut layouter,
&self.settings().run_args,
&inputs,
&mut vars,
&outputs,
&mut constants,
)
.map_err(|e| {
log::error!("{}", e);
PlonkError::Synthesis
})?;
trace!("running output module layout");
let output_visibility = &self.settings().run_args.output_visibility;
let outlets = output_visibility.overwrites_inputs();
instance_offset += vars.get_instance_len();
if !outlets.is_empty() {
let mut output_outlets = vec![];
for outlet in &outlets {
output_outlets.push(outputs[*outlet].clone());
}
graph_modules.layout(
&mut layouter,
&mut config.module_configs,
&mut output_outlets,
&self.settings().run_args.output_visibility,
&mut instance_offset,
&mut constants,
)?;
for (i, outlet) in outlets.iter().enumerate() {
outputs[*outlet] = output_outlets[i].clone();
}
} else {
graph_modules.layout(
&mut layouter,
&mut config.module_configs,
&mut outputs,
&self.settings().run_args.output_visibility,
&mut instance_offset,
&mut constants,
)?;
}
Ok(())
}
}
use super::extract_const_quantized_values;
use super::node::*;
use super::scale_to_multiplier;
use super::vars::*;
use super::GraphError;
use super::GraphSettings;
use crate::circuit::hybrid::HybridOp;
use crate::circuit::region::ConstantsMap;
use crate::circuit::region::RegionCtx;
use crate::circuit::table::Range;
use crate::circuit::Input;
use crate::circuit::InputType;
use crate::circuit::Unknown;
use crate::tensor::ValType;
use crate::{
circuit::{lookup::LookupOp, BaseConfig as PolyConfig, CheckMode, Op},
tensor::{Tensor, ValTensor},
RunArgs,
};
use halo2curves::bn256::Fr as Fp;
use super::input::GraphData;
use colored::Colorize;
use halo2_proofs::{
circuit::{Layouter, Value},
plonk::ConstraintSystem,
};
use halo2curves::ff::Field;
use itertools::Itertools;
use log::error;
use log::{debug, info, trace};
use serde::Deserialize;
use serde::Serialize;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::error::Error;
use std::fs;
use std::io::Read;
use std::path::PathBuf;
use tabled::Table;
use tract_onnx;
use tract_onnx::prelude::{
Framework, Graph, InferenceFact, InferenceModelExt, SymbolValues, TypedFact, TypedOp,
};
use tract_onnx::tract_core::internal::DatumType;
use tract_onnx::tract_hir::ops::scan::Scan;
use unzip_n::unzip_n;
unzip_n!(pub 3);
type TractResult = (Graph<TypedFact, Box<dyn TypedOp>>, SymbolValues);
pub struct ForwardResult {
pub outputs: Vec<Tensor<Fp>>,
pub max_lookup_inputs: i128,
pub min_lookup_inputs: i128,
pub max_range_size: i128,
}
impl From<DummyPassRes> for ForwardResult {
fn from(res: DummyPassRes) -> Self {
Self {
outputs: res.outputs,
max_lookup_inputs: res.max_lookup_inputs,
min_lookup_inputs: res.min_lookup_inputs,
max_range_size: res.max_range_size,
}
}
}
#[derive(Clone)]
pub struct ModelConfig {
pub base: PolyConfig<Fp>,
pub vars: ModelVars<Fp>,
}
pub type NodeGraph = BTreeMap<usize, NodeType>;
pub struct DummyPassRes {
pub num_rows: usize,
pub num_dynamic_lookups: usize,
pub dynamic_lookup_col_coord: usize,
pub num_shuffles: usize,
pub shuffle_col_coord: usize,
pub linear_coord: usize,
pub total_const_size: usize,
pub lookup_ops: HashSet<LookupOp>,
pub range_checks: HashSet<Range>,
pub max_lookup_inputs: i128,
pub min_lookup_inputs: i128,
pub max_range_size: i128,
pub outputs: Vec<Tensor<Fp>>,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Model {
pub graph: ParsedNodes,
pub visibility: VarVisibility,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum OutputMapping {
Single {
outlet: usize,
is_state: bool,
},
Stacked {
outlet: usize,
axis: usize,
is_state: bool,
},
}
impl OutputMapping {
pub fn is_state(&self) -> bool {
match self {
OutputMapping::Single { is_state, .. } => *is_state,
OutputMapping::Stacked { is_state, .. } => *is_state,
}
}
pub fn outlet(&self) -> usize {
match self {
OutputMapping::Single { outlet, .. } => *outlet,
OutputMapping::Stacked { outlet, .. } => *outlet,
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum InputMapping {
Full,
State,
Stacked {
axis: usize,
chunk: usize,
},
}
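/// Number of scan iterations implied by the stacked inputs: each stacked
/// input contributes `ceil(dims[axis] / chunk)`, and all stacked inputs must
/// agree. E.g. scanning axis 0 of a [10, 3] tensor in chunks of 2 gives 5.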
fn number_of_iterations(mappings: &[InputMapping], dims: Vec<&[usize]>) -> usize {
let mut number_of_iterations =
dims.iter()
.zip(mappings)
.filter_map(|(dims, mapping)| match mapping {
InputMapping::Stacked { axis, chunk } => Some(
(dims[*axis] + chunk - 1) / chunk,
),
_ => None,
});
assert!(number_of_iterations.clone().all_equal());
number_of_iterations.next().unwrap_or(1)
}
fn input_state_idx(input_mappings: &[InputMapping]) -> Vec<usize> {
input_mappings
.iter()
.enumerate()
.filter(|(_, r)| matches!(r, InputMapping::State))
.map(|(index, _)| index)
.collect::<Vec<_>>()
}
fn output_state_idx(output_mappings: &[Vec<OutputMapping>]) -> Vec<usize> {
output_mappings
.iter()
.flatten()
.filter_map(|x| if x.is_state() { Some(x.outlet()) } else { None })
.collect::<Vec<_>>()
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum NodeType {
Node(Node),
SubGraph {
model: Model,
inputs: Vec<Outlet>,
idx: usize,
output_mappings: Vec<Vec<OutputMapping>>,
input_mappings: Vec<InputMapping>,
out_dims: Vec<Vec<usize>>,
out_scales: Vec<crate::Scale>,
},
}
impl NodeType {
pub fn is_lookup(&self) -> bool {
match self {
NodeType::Node(n) => n.opkind.is_lookup(),
NodeType::SubGraph { .. } => false,
}
}
pub fn num_uses(&self) -> usize {
match self {
NodeType::Node(n) => n.num_uses,
NodeType::SubGraph { .. } => 0,
}
}
pub fn inputs(&self) -> Vec<Outlet> {
match self {
NodeType::Node(n) => n.inputs.clone(),
NodeType::SubGraph { inputs, .. } => inputs.clone(),
}
}
pub fn out_dims(&self) -> Vec<Vec<usize>> {
match self {
NodeType::Node(n) => vec![n.out_dims.clone()],
NodeType::SubGraph { out_dims, .. } => out_dims.clone(),
}
}
pub fn out_scales(&self) -> Vec<crate::Scale> {
match self {
NodeType::Node(n) => vec![n.out_scale],
NodeType::SubGraph { out_scales, .. } => out_scales.clone(),
}
}
pub fn as_str(&self) -> String {
match self {
NodeType::Node(n) => n.opkind.as_string(),
NodeType::SubGraph { .. } => "SUBGRAPH".into(),
}
}
pub fn is_rebase(&self) -> bool {
match self {
NodeType::Node(n) => matches!(n.opkind, SupportedOp::RebaseScale { .. }),
NodeType::SubGraph { .. } => false,
}
}
pub fn is_input(&self) -> bool {
match self {
NodeType::Node(n) => n.opkind.is_input(),
NodeType::SubGraph { .. } => false,
}
}
pub fn is_constant(&self) -> bool {
match self {
NodeType::Node(n) => n.opkind.is_constant(),
NodeType::SubGraph { .. } => false,
}
}
pub fn idx(&self) -> usize {
match self {
NodeType::Node(n) => n.idx,
NodeType::SubGraph { idx, .. } => *idx,
}
}
pub fn decrement_use(&mut self) {
match self {
NodeType::Node(n) => n.num_uses -= 1,
NodeType::SubGraph { .. } => log::warn!("Cannot decrement const of subgraph"),
}
}
pub fn bump_scale(&mut self, scale: crate::Scale) {
match self {
NodeType::Node(n) => n.out_scale = scale,
NodeType::SubGraph { .. } => log::warn!("Cannot bump scale of subgraph"),
}
}
pub fn replace_opkind(&mut self, opkind: SupportedOp) {
match self {
NodeType::Node(n) => n.opkind = opkind,
NodeType::SubGraph { .. } => log::warn!("Cannot replace opkind of subgraph"),
}
}
pub fn opkind(&self) -> SupportedOp {
match self {
NodeType::Node(n) => n.opkind.clone(),
NodeType::SubGraph { .. } => SupportedOp::Unknown(Unknown),
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ParsedNodes {
pub nodes: BTreeMap<usize, NodeType>,
inputs: Vec<usize>,
outputs: Vec<Outlet>,
}
impl ParsedNodes {
pub fn num_inputs(&self) -> usize {
self.inputs.len()
}
pub fn get_input_types(&self) -> Result<Vec<InputType>, GraphError> {
self.inputs
.iter()
.map(|o| {
match self
.nodes
.get(o)
.ok_or(GraphError::MissingNode(*o))?
.opkind()
{
SupportedOp::Input(Input { datum_type, .. }) => Ok(datum_type.clone()),
_ => Err(GraphError::InvalidInputTypes),
}
})
.collect::<Result<Vec<_>, _>>()
}
pub fn input_shapes(&self) -> Result<Vec<Vec<usize>>, Box<dyn Error>> {
let mut inputs = vec![];
for input in self.inputs.iter() {
let node = self
.nodes
.get(input)
.ok_or(GraphError::MissingNode(*input))?;
let input_dims = node.out_dims();
let input_dim = input_dims.first().ok_or(GraphError::MissingNode(*input))?;
inputs.push(input_dim.clone());
}
Ok(inputs)
}
pub fn num_outputs(&self) -> usize {
self.outputs.len()
}
pub fn output_shapes(&self) -> Result<Vec<Vec<usize>>, GraphError> {
let mut outputs = vec![];
for output in self.outputs.iter() {
let (idx, outlet) = output;
let node = self.nodes.get(idx).ok_or(GraphError::MissingNode(*idx))?;
let out_dims = node.out_dims();
let out_dim = out_dims
.get(*outlet)
.ok_or(GraphError::MissingNode(*outlet))?;
outputs.push(out_dim.clone());
}
Ok(outputs)
}
pub fn get_input_scales(&self) -> Vec<crate::Scale> {
let input_nodes = self.inputs.iter();
input_nodes
.flat_map(|idx| {
self.nodes
.get(idx)
.ok_or(GraphError::MissingNode(*idx))
.map(|n| n.out_scales())
.unwrap_or_default()
})
.collect()
}
pub fn get_output_scales(&self) -> Result<Vec<crate::Scale>, GraphError> {
let output_nodes = self.outputs.iter();
output_nodes
.map(|(idx, outlet)| {
Ok(self
.nodes
.get(idx)
.ok_or(GraphError::MissingNode(*idx))?
.out_scales()[*outlet])
})
.collect::<Result<Vec<_>, GraphError>>()
}
}
impl Model {
pub fn new(reader: &mut dyn std::io::Read, run_args: &RunArgs) -> Result<Self, Box<dyn Error>> {
let visibility = VarVisibility::from_args(run_args)?;
let graph = Self::load_onnx_model(reader, run_args, &visibility)?;
let om = Model { graph, visibility };
debug!("\n {}", om.table_nodes());
Ok(om)
}
pub fn save(&self, path: PathBuf) -> Result<(), Box<dyn Error>> {
let f = std::fs::File::create(path)?;
let writer = std::io::BufWriter::new(f);
bincode::serialize_into(writer, &self)?;
Ok(())
}
pub fn load(path: PathBuf) -> Result<Self, Box<dyn Error>> {
let mut f = std::fs::File::open(&path)?;
let metadata = fs::metadata(&path)?;
let mut buffer = vec![0; metadata.len() as usize];
f.read_exact(&mut buffer)?;
let result = bincode::deserialize(&buffer)?;
Ok(result)
}
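/// Derive circuit settings by running a dummy (witness-free) layout pass
/// over unknown inputs and counting rows, constants, lookups, and range
/// checks.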
pub fn gen_params(
&self,
run_args: &RunArgs,
check_mode: CheckMode,
) -> Result<GraphSettings, Box<dyn Error>> {
let instance_shapes = self.instance_shapes()?;
debug!(
"{} {} {}",
" |
model has".blue(),
instance_shapes.len().to_string().blue(),
"instances".blue()
);
let inputs: Vec<ValTensor<Fp>> = self
.graph
.input_shapes()?
.iter()
.map(|shape| {
let len = shape.iter().product();
let mut t: ValTensor<Fp> = (0..len)
.map(|_| {
if !self.visibility.input.is_fixed() {
ValType::Value(Value::<Fp>::unknown())
} else {
ValType::Constant(Fp::random(&mut rand::thread_rng()))
}
})
.collect::<Vec<_>>()
.into();
t.reshape(shape)?;
Ok(t)
})
.collect::<Result<Vec<_>, Box<dyn Error>>>()?;
let res = self.dummy_layout(run_args, &inputs, false)?;
Ok(GraphSettings {
run_args: run_args.clone(),
model_instance_shapes: instance_shapes,
module_sizes: crate::graph::modules::ModuleSizes::default(),
num_rows: res.num_rows,
total_assignments: res.linear_coord,
required_lookups: res.lookup_ops.into_iter().collect(),
required_range_checks: res.range_checks.into_iter().collect(),
model_output_scales: self.graph.get_output_scales()?,
model_input_scales: self.graph.get_input_scales(),
num_dynamic_lookups: res.num_dynamic_lookups,
total_dynamic_col_size: res.dynamic_lookup_col_coord,
num_shuffles: res.num_shuffles,
total_shuffle_col_size: res.shuffle_col_coord,
total_const_size: res.total_const_size,
check_mode,
version: env!("CARGO_PKG_VERSION").to_string(),
num_blinding_factors: None,
timestamp: Some(
instant::SystemTime::now()
.duration_since(instant::SystemTime::UNIX_EPOCH)?
.as_millis(),
),
})
}
pub fn forward(
&self,
model_inputs: &[Tensor<Fp>],
run_args: &RunArgs,
witness_gen: bool,
) -> Result<ForwardResult, Box<dyn Error>> {
let valtensor_inputs: Vec<ValTensor<Fp>> = model_inputs
.iter()
.map(|x| x.map(|elem| ValType::Value(Value::known(elem))).into())
.collect();
let res = self.dummy_layout(run_args, &valtensor_inputs, witness_gen)?;
Ok(res.into())
}
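/// Load an ONNX graph with tract: bind any symbolic dims (e.g. batch_size)
/// from `run_args.variables`, concretize and declutter the typed model, and
/// evaluate leftover TDim constants against the symbol values.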
fn load_onnx_using_tract(
reader: &mut dyn std::io::Read,
run_args: &RunArgs,
) -> Result<TractResult, Box<dyn Error>> {
use tract_onnx::{
tract_core::internal::IntoArcTensor, tract_hir::internal::GenericFactoid,
};
let mut model = tract_onnx::onnx().model_for_read(reader).map_err(|e| {
error!("Error loading model: {}", e);
GraphError::ModelLoad
})?;
let variables: std::collections::HashMap<String, usize> =
std::collections::HashMap::from_iter(run_args.variables.clone());
for (i, id) in model.clone().inputs.iter().enumerate() {
let input = model.node_mut(id.node);
let mut fact: InferenceFact = input.outputs[0].fact.clone();
for (i, x) in fact.clone().shape.dims().enumerate() {
if matches!(x, GenericFactoid::Any) {
let batch_size = match variables.get("batch_size") {
Some(x) => x,
None => return Err("Unknown dimension batch_size in model inputs, set batch_size in variables".into()),
};
fact.shape
.set_dim(i, tract_onnx::prelude::TDim::Val(*batch_size as i64));
}
}
model.set_input_fact(i, fact)?;
} |
for (i, _) in model.clone().outputs.iter().enumerate() {
model.set_output_fact(i, InferenceFact::default())?;
}
let mut symbol_values = SymbolValues::default();
for (symbol, value) in run_args.variables.iter() {
let symbol = model.symbol_table.sym(symbol);
symbol_values = symbol_values.with(&symbol, *value as i64);
debug!("set {} to {}", symbol, value);
}
let mut typed_model = model
.into_typed()?
.concretize_dims(&symbol_values)?
.into_decluttered()?;
for node in typed_model.eval_order()? {
let node = typed_model.node_mut(node);
if let Some(op) = node.op_as_mut::<tract_onnx::tract_core::ops::konst::Const>() {
if op.0.datum_type() == DatumType::TDim {
let mut constant = op.0.as_ref().clone();
constant
.as_slice_mut::<tract_onnx::prelude::TDim>()?
.iter_mut()
.for_each(|x| *x = x.eval(&symbol_values));
op.0 = constant.into_arc_tensor();
}
}
}
Ok((typed_model, symbol_values))
}
fn load_onnx_model(
reader: &mut dyn std::io::Read,
run_args: &RunArgs,
visibility: &VarVisibility,
) -> Result<ParsedNodes, Box<dyn Error>> {
let start_time = instant::Instant::now();
let (model, symbol_values) = Self::load_onnx_using_tract(reader, run_args)?;
let scales = VarScales::from_args(run_args)?;
let nodes = Self::nodes_from_graph(
&model,
run_args,
&scales,
visibility,
&symbol_values,
None,
None,
)?;
debug!("\n {}", model);
let parsed_nodes = ParsedNodes {
nodes,
inputs: model.inputs.iter().map(|o| o.node).collect(),
outputs: model.outputs.iter().map(|o| (o.node, o.slot)).collect(),
};
let duration = start_time.elapsed();
trace!("model loading took: {:?}", duration);
Ok(parsed_nodes)
}
pub fn table_nodes(&self) -> String {
let mut node_accumulator = vec![];
let mut string = String::new();
for (idx, node) in &self.graph.nodes {
match node {
NodeType::Node(n) => {
node_accumulator.push(n);
}
NodeType::SubGraph { model, inputs, .. } => {
let mut table = Table::new(node_accumulator.iter());
table.with(tabled::settings::Style::modern());
table.with(tabled::settings::Shadow::new(1));
table.with(
tabled::settings::style::BorderColor::default()
.top(tabled::settings::Color::BG_YELLOW),
);
string = format!("{} \n\n MAIN GRAPH \n\n{}", string, table);
node_accumulator = vec![];
string = format!(
"{}\n\n SUBGRAPH AT IDX {} WITH INPUTS {:?}\n{}",
string,
idx,
inputs,
model.table_nodes(),
);
}
}
}
let mut table = Table::new(node_accumulator.iter());
table.with(tabled::settings::Style::modern());
format!("{} \n{}", string, table)
}
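/// Walk the tract graph in order, converting each node (or Scan subgraph)
/// into an ezkl `NodeType` while propagating fixed-point scales and applying
/// any input/output scale overrides from an enclosing subgraph.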
pub fn nodes_from_graph(
graph: &Graph<TypedFact, Box<dyn TypedOp>>,
run_args: &RunArgs,
scales: &VarScales,
visibility: &VarVisibility,
symbol_values: &SymbolValues,
override_input_scales: Option<Vec<crate::Scale>>,
override_output_scales: Option<HashMap<usize, crate::Scale>>,
) -> Result<BTreeMap<usize, NodeType>, Box<dyn Error>> {
use crate::graph::node_output_shapes;
let mut nodes = BTreeMap::<usize, NodeType>::new();
let mut input_idx = 0;
for (i, n) in graph.nodes.iter().enumerate() {
match n.op().downcast_ref::<Scan>() {
Some(b) => {
let model = b.body.clone();
let input_scales = n
.inputs
.iter()
.map(|i| {
Ok(nodes
.get(&i.node)
.ok_or(GraphError::MissingNode(i.node))?
.out_scales()[0])
})
.collect::<Result<Vec<_>, GraphError>>()?;
let mut input_mappings = vec![];
for mapping in &b.input_mapping {
match mapping {
tract_onnx::tract_hir::ops::scan::InputMapping::Scan(info) => {
input_mappings.push(InputMapping::Stacked {
axis: info.axis,
chunk: info.chunk as usize,
});
}
tract_onnx::tract_hir::ops::scan::InputMapping::State => {
input_mappings.push(InputMapping::State);
}
tract_onnx::tract_hir::ops::scan::InputMapping::Full => {
input_mappings.push(InputMapping::Full);
}
}
}
let input_state_idx = input_state_idx(&input_mappings);
let mut output_mappings = vec![];
for (i, mapping) in b.output_mapping.iter().enumerate() {
let mut mappings = vec![];
if let Some(outlet) = mapping.last_value_slot {
mappings.push(OutputMapping::Single {
outlet,
is_state: mapping.state,
});
} else if mapping.state {
mappings.push(OutputMapping::Single {
outlet: i,
is_state: mapping.state,
});
}
if let Some(last) = mapping.scan {
mappings.push(OutputMapping::Stacked {
outlet: last.0,
axis: last.1.axis,
is_state: false,
});
}
output_mappings.push(mappings);
}
let output_state_idx = output_state_idx(&output_mappings);
let mut output_scale_override = HashMap::new();
for (input_idx, output_idx) in input_state_idx.iter().zip(output_state_idx) {
let input_scale = input_scales[*input_idx];
let mut traversed_len = 0;
for (outer_idx, mappings) in output_mappings.iter().enumerate() {
let mapping_len = mappings.len();
if traversed_len + mapping_len > output_idx {
let output_node_idx = b.body.outputs[outer_idx].node;
output_scale_override.insert(output_node_idx, input_scale);
}
traversed_len += mapping_len;
}
}
let subgraph_nodes = Self::nodes_from_graph(
&model,
run_args,
scales,
visibility,
symbol_values,
Some(input_scales.clone()),
Some(output_scale_override),
)?;
let subgraph = ParsedNodes {
nodes: subgraph_nodes,
inputs: model.inputs.iter().map(|o| o.node).collect(),
outputs: model.outputs.iter().map(|o| (o.node, o.slot)).collect(),
};
let om = Model {
graph: subgraph,
visibility: visibility.clone(),
};
let out_dims = node_output_shapes(n, symbol_values)?;
let mut output_scales = BTreeMap::new();
for (i, _mapping) in b.output_mapping.iter().enumerate() {
for mapping in b.output_mapping.iter() {
if let Some(outlet) = mapping.last_value_slot {
output_scales.insert(outlet, om.graph.get_output_scales()?[i]);
}
if let Some(last) = mapping.scan {
output_scales.insert(last.0, om.graph.get_output_scales()?[i]);
}
}
}
let out_scales = output_scales.into_values().collect_vec();
nodes.insert(
i,
NodeType::SubGraph {
model: om,
inputs: n.inputs.iter().map(|i| (i.node, i.slot)).collect_vec(),
idx: i,
output_mappings,
input_mappings,
out_dims,
out_scales,
},
);
}
None => {
let mut n = Node::new(
n.clone(),
&mut nodes,
scales,
&run_args.param_visibility,
i,
symbol_values,
run_args.div_rebasing,
run_args.rebase_frac_zero_constants,
)?;
if let Some(ref scales) = override_input_scales {
if let Some(inp) = n.opkind.get_input() {
let scale = scales[input_idx];
n.opkind = SupportedOp::Input(Input {
scale,
datum_type: inp.datum_type,
});
input_idx += 1;
n.out_scale = scale;
}
}
if let Some(ref scales) = override_output_scales {
if scales.contains_key(&i) {
let scale_diff = n.out_scale - scales[&i];
n.opkind = if scale_diff > 0 {
RebaseScale::rebase(
n.opkind,
scales[&i],
n.out_scale,
1,
run_args.div_rebasing,
)
} else {
RebaseScale::rebase_up(
n.opkind,
scales[&i],
n.out_scale,
run_args.div_rebasing,
)
};
n.out_scale = scales[&i];
}
}
nodes.insert(i, NodeType::Node(n));
}
}
}
Self::remove_unused_nodes(&mut nodes);
Ok(nodes)
}
fn remove_unused_nodes(nodes: &mut BTreeMap<usize, NodeType>) {
nodes.retain(|_, n| match n {
NodeType::Node(n) => match &mut n.opkind {
SupportedOp::Constant(c) => {
c.empty_raw_value();
n.num_uses > 0
}
_ => n.num_uses > 0,
},
NodeType::SubGraph { model, .. } => {
Self::remove_unused_nodes(&mut model.graph.nodes);
true
}
});
}
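/// Run the original f32 ONNX model with tract over each data chunk, for
/// comparison against the quantized circuit outputs during calibration.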
pub fn run_onnx_predictions(
run_args: &RunArgs,
model_path: &std::path::Path,
data_chunks: &[GraphData],
input_shapes: Vec<Vec<usize>>,
) -> Result<Vec<Vec<Tensor<f32>>>, Box<dyn Error>> {
use tract_onnx::tract_core::internal::IntoArcTensor;
let (model, _) = Model::load_onnx_using_tract(
&mut std::fs::File::open(model_path)
.map_err(|_| format!("failed to load {}", model_path.display()))?,
run_args,
)?;
let datum_types: Vec<DatumType> = model
.input_outlets()?
.iter()
.map(|o| model.node(o.node).outputs[o.slot].fact.datum_type)
.collect();
let runnable_model = model.into_runnable()?;
let mut outputs = vec![];
for chunk in data_chunks {
let result = runnable_model.run(chunk.to_tract_data(&input_shapes, &datum_types)?)?;
outputs.push(
result
.into_iter()
.map(|t| {
crate::graph::utilities::extract_tensor_value(t.into_arc_tensor()).unwrap()
})
.collect(),
);
}
Ok(outputs)
}
pub fn from_run_args(
run_args: &RunArgs,
model: &std::path::Path,
) -> Result<Self, Box<dyn Error>> {
Model::new(
&mut std::fs::File::open(model)
.map_err(|_| format!("failed to l |
oad {}", model.display()))?,
run_args,
)
}
pub fn configure(
meta: &mut ConstraintSystem<Fp>,
vars: &ModelVars<Fp>,
settings: &GraphSettings,
) -> Result<PolyConfig<Fp>, Box<dyn Error>> {
debug!("configuring model");
let lookup_range = settings.run_args.lookup_range;
let logrows = settings.run_args.logrows as usize;
let required_lookups = settings.required_lookups.clone();
let required_range_checks = settings.required_range_checks.clone();
let mut base_gate = PolyConfig::configure(
meta,
vars.advices[0..2].try_into()?,
&vars.advices[2],
settings.check_mode,
);
let input = &vars.advices[0];
let output = &vars.advices[2];
let index = &vars.advices[1];
for op in required_lookups {
base_gate.configure_lookup(meta, input, output, index, lookup_range, logrows, &op)?;
}
for range in required_range_checks {
base_gate.configure_range_check(meta, input, index, range, logrows)?;
}
if settings.requires_dynamic_lookup() {
base_gate.configure_dynamic_lookup(
meta,
vars.advices[0..3].try_into()?,
vars.advices[3..6].try_into()?,
)?;
}
if settings.requires_shuffle() {
base_gate.configure_shuffles(
meta,
vars.advices[0..2].try_into()?,
vars.advices[3..5].try_into()?,
)?;
}
Ok(base_gate)
}
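/// Assign the whole model into a single region: wire public instances to
/// inputs, lay out nodes in graph order, then range-check public or fixed
/// outputs against the claimed values (within the configured tolerance).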
pub fn layout(
&self,
mut config: ModelConfig,
layouter: &mut impl Layouter<Fp>,
run_args: &RunArgs,
inputs: &[ValTensor<Fp>],
vars: &mut ModelVars<Fp>,
witnessed_outputs: &[ValTensor<Fp>],
constants: &mut ConstantsMap<Fp>,
) -> Result<Vec<ValTensor<Fp>>, Box<dyn Error>> {
info!("model layout...");
let start_time = instant::Instant::now();
let mut results = BTreeMap::<usize, Vec<ValTensor<Fp>>>::new();
let input_shapes = self.graph.input_shapes()?;
for (i, input_idx) in self.graph.inputs.iter().enumerate() {
if self.visibility.input.is_public() {
let instance = vars.instance.as_ref().ok_or("no instance")?.clone();
results.insert(*input_idx, vec![instance]);
vars.increment_instance_idx();
} else {
let mut input = inputs[i].clone();
input.reshape(&input_shapes[i])?;
results.insert(*input_idx, vec![input]);
}
}
let instance_idx = vars.get_instance_idx();
config.base.layout_tables(layouter)?;
config.base.layout_range_checks(layouter)?;
let original_constants = constants.clone();
let outputs = layouter.assign_region(
|| "model",
|region| {
let mut thread_safe_region = RegionCtx::new_with_constants(region, 0, run_args.num_inner_cols, original_constants.clone());
vars.set_instance_idx(instance_idx);
let outputs = self
.layout_nodes(&mut config, &mut thread_safe_region, &mut results)
.map_err(|e| {
error!("{}", e);
halo2_proofs::plonk::Error::Synthesis
})?;
if run_args.output_visibility.is_public() || run_args.output_visibility.is_fixed() {
let output_scales = self.graph.get_output_scales().map_err(|e| {
error!("{}", e);
halo2_proofs::plonk::Error::Synthesis
})?;
let res = outputs
.iter()
.enumerate()
.map(|(i, output)| {
let mut tolerance = run_args.tolerance;
tolerance.scale = scale_to_multiplier(output_scales[i]).into();
let comparators = if run_args.output_visibility == Visibility::Public {
let res = vars.instance.as_ref().ok_or("no instance")?.clone();
vars.increment_instance_idx();
res
} else {
if witnessed_outputs.len() <= i {
return Err("you provided insufficient witness values to generate a fixed output".into());
}
witnessed_outputs[i].clone()
};
config.base.layout(
&mut thread_safe_region,
&[output.clone(), comparators],
Box::new(HybridOp::RangeCheck(tolerance)),
)
})
.collect::<Result<Vec<_>,_>>();
res.map_err(|e| {
error!("{}", e);
halo2_proofs::plonk::Error::Synthesis
})?;
}
thread_safe_region.debug_report();
*constants = thread_safe_region.assigned_constants().clone();
Ok(outputs)
},
)?;
let duration = start_time.elapsed();
trace!("model layout took: {:?}", duration);
Ok(outputs)
}
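/// Lay out nodes in topological (BTreeMap key) order, feeding each node the
/// already-assigned results of its input outlets.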
fn layout_nodes(
&self,
config: &mut ModelConfig,
region: &mut RegionCtx<Fp>,
results: &mut BTreeMap<usize, Vec<ValTensor<Fp>>>,
) -> Result<Vec<ValTensor<Fp>>, Box<dyn Error>> {
let orig_inputs: BTreeMap<usize, _> = results
.clone()
.into_iter()
.filter(|(idx, _)| self.graph.inputs.contains(idx))
.collect();
for (idx, node) in self.graph.nodes.iter() {
debug!("laying out {}: {}", idx, node.as_str(),);
region.debug_report();
debug!("input indices: {:?}", node.inputs());
debug!("output scales: {:?}", node.out_scales());
debug!(
"input scales: {:?}",
node.inputs()
.iter()
.map(|(idx, outlet)| self.graph.nodes[idx].out_scales()[*outlet])
.collect_vec()
);
let mut values: Vec<ValTensor<Fp>> = if !node.is_input() {
node.inputs()
.iter()
.map(|(idx, outlet)| {
Ok(results.get(idx).ok_or(GraphError::MissingResults)?[*outlet].clone())
})
.collect::<Result<Vec<_>, GraphError>>()?
} else {
vec![results.get(idx).ok_or(GraphError::MissingResults)?[0].clone()]
};
debug!("output dims: {:?}", node.out_dims());
debug!(
"input dims {:?}",
values.iter().map(|v| v.dims()).collect_vec()
);
match &node {
NodeType::Node(n) => {
let res = if node.is_constant() && node.num_uses() == 1 {
log::debug!("node {} is a constant with 1 use", n.idx);
let mut node = n.clone();
let c = node.opkind.get_mutable_constant().ok_or("no constant")?;
Some(c.quantized_values.clone().try_into()?)
} else {
config
.base
.layout(region, &values, n.opkind.clone_dyn())
.map_err(|e| {
error!("{}", e);
halo2_proofs::plonk::Error::Synthesis