text
stringlengths 1
2.05k
|
---|
ed: bool,
pub range: Range,
_marker: PhantomData<F>,
}
impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Table<F> {
    /// Returns, as a field element, the index of the column that `input`
    /// falls into, given the table's range and per-column capacity.
    pub fn get_col_index(&self, input: F) -> F {
        let offset_from_start =
            (crate::fieldutils::felt_to_i128(input) - self.range.0).abs();
        let chunk = offset_from_start / (self.col_size as i128);
        i128_to_felt(chunk)
    }
    /// Returns the first input element stored in column `chunk`, paired with
    /// the nonlinearity evaluated at that element.
    pub fn get_first_element(&self, chunk: usize) -> (F, F) {
        let start = chunk as i128 * (self.col_size as i128) + self.range.0;
        let first_element: F = i128_to_felt(start);
        let evaluated = self
            .nonlinearity
            .f(&[Tensor::from(vec![first_element].into_iter())])
            .unwrap();
        (first_element, evaluated.output[0])
    }
    /// Usable rows per column: 2^logrows minus the rows reserved for blinding.
    pub fn cal_col_size(logrows: usize, reserved_blinding_rows: usize) -> usize {
        2usize.pow(logrows as u32) - reserved_blinding_rows
    }
    /// Usable range for a `bits`-wide lookup: 2^bits minus reserved blinding rows.
    pub fn cal_bit_range(bits: usize, reserved_blinding_rows: usize) -> usize {
        2usize.pow(bits as u32) - reserved_blinding_rows
    }
}
/// Number of table columns needed to hold `range_len` entries when each
/// column stores `col_size` rows. Always reserves one extra column for the
/// remainder (so an exact multiple still gets a trailing column).
pub fn num_cols_required(range_len: i128, col_size: usize) -> usize {
    let full_cols = range_len / (col_size as i128);
    full_cols as usize + 1
}
impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Table<F> {
pub fn configure(
cs: &mut ConstraintSystem<F>,
range: Range,
logrows: usize,
nonlinearity: &LookupOp,
preexisting_inputs: Option<Vec<TableColumn>>,
) -> Table<F> {
let factors = cs.blinding_factors() + RESERVED_BLINDING_ROWS_PAD;
let col_size = Self::cal_col_size(logrows, factors);
let num_cols = num_cols_required((range.1 - range.0).abs(), col_size);
debug!("table range: {:?}", range);
let table_inputs = preexisting_inputs.unwrap_or_else(|| {
let mut cols = vec![];
for _ in 0..num_cols {
cols.push(cs.lookup_table_column());
}
cols
});
let num_cols = table_inputs.len();
if num_cols > 1 { |
warn!("Using {} columns for non-linearity table.", num_cols);
}
let table_outputs = table_inputs
.iter()
.map(|_| cs.lookup_table_column())
.collect::<Vec<_>>();
Table {
nonlinearity: nonlinearity.clone(),
table_inputs,
table_outputs,
is_assigned: false,
selector_constructor: SelectorConstructor::new(num_cols),
col_size,
range,
_marker: PhantomData,
}
}
pub fn cartesian_coord(&self, linear_coord: usize) -> (usize, usize) {
let x = linear_coord / self.col_size;
let y = linear_coord % self.col_size;
(x, y)
}
pub fn layout(
&mut self,
layouter: &mut impl Layouter<F>,
preassigned_input: bool,
) -> Result<(), Box<dyn Error>> {
if self.is_assigned {
return Err(Box::new(CircuitError::TableAlreadyAssigned));
}
let smallest = self.range.0;
let largest = self.range.1;
let inputs: Tensor<F> = Tensor::from(smallest..=largest).map(|x| i128_to_felt(x));
let evals = self.nonlinearity.f(&[inputs.clone()])?;
let chunked_inputs = inputs.chunks(self.col_size);
self.is_assigned = true;
let col_multipliers: Vec<F> = (0..chunked_inputs.len())
.map(|x| self.selector_constructor.get_selector_val_at_idx(x))
.collect();
let _ = chunked_inputs
.enumerate()
.map(|(chunk_idx, inputs)| {
layouter.assign_table(
|| "nl table",
|mut table| {
let _ = inputs
.iter()
.enumerate()
.map(|(mut row_offset, input)| {
let col_multiplier = col_multipliers[chunk_idx];
row_offset += chunk_idx * self.col_size; |
let (x, y) = self.cartesian_coord(row_offset);
if !preassigned_input {
table.assign_cell(
|| format!("nl_i_col row {}", row_offset),
self.table_inputs[x],
y,
|| Value::known(*input * col_multiplier),
)?;
}
let output = evals.output[row_offset];
table.assign_cell(
|| format!("nl_o_col row {}", row_offset),
self.table_outputs[x],
y,
|| Value::known(output * col_multiplier),
)?;
Ok(())
})
.collect::<Result<Vec<()>, halo2_proofs::plonk::Error>>()?;
Ok(())
},
)
})
.collect::<Result<Vec<()>, halo2_proofs::plonk::Error>>()?;
Ok(())
}
}
// Halo2 lookup table used to range-check witnessed values.
pub struct RangeCheck<F: PrimeField> {
    // Lookup columns holding the valid input values.
    pub inputs: Vec<TableColumn>,
    // Number of usable rows per column.
    pub col_size: usize,
    // Produces the per-column selector multipliers.
    pub selector_constructor: SelectorConstructor<F>,
    // Whether `layout` has already assigned this table.
    pub is_assigned: bool,
    // Inclusive (min, max) range being checked.
    pub range: Range,
    _marker: PhantomData<F>,
}
impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RangeCheck<F> {
    /// Returns the first input element stored in column `chunk`.
    pub fn get_first_element(&self, chunk: usize) -> F {
        let chunk = chunk as i128;
        i128_to_felt(chunk * (self.col_size as i128) + self.range.0)
    }
    /// Usable rows per column: 2^logrows minus the rows reserved for blinding.
    pub fn cal_col_size(logrows: usize, reserved_blinding_rows: usize) -> usize {
        2usize.pow(logrows as u32) - reserved_blinding_rows
    }
    /// Usable range for a `bits`-wide check: 2^bits minus reserved blinding rows.
    pub fn cal_bit_range(bits: usize, reserved_blinding_rows: usize) -> usize {
        2usize.pow(bits as u32) - reserved_blinding_rows
    }
    /// Returns, as a field element, the index of the column that `input`
    /// falls into, given the table's range and per-column capacity.
    pub fn get_col_index(&self, input: F) -> F {
        let chunk =
            (crate::fieldutils::felt_to_i128(input) - self.range.0).abs() / (self.col_size as i128);
        i128_to_felt(chunk)
    }
}
impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RangeCheck<F> {
pub fn configure(cs: &mut ConstraintSystem<F>, range: Range, logrows: usize) -> RangeCheck<F> {
log::debug!("range check range: {:?}", range);
let factors = cs.blinding_factors() + RESERVED_BLINDING_ROWS_PAD;
let col_size = Self::cal_col_size(logrows, factors);
let num_cols = num_cols_required((range.1 - range.0).abs(), col_size);
let inputs = {
let mut cols = vec![];
for _ in 0..num_cols {
cols.push(cs.lookup_table_column());
}
cols
};
let num_cols = inputs.len();
if num_cols > 1 {
warn!("Using {} columns for range-check.", num_cols);
}
RangeCheck {
inputs,
col_size,
is_assigned: false,
selector_constructor: SelectorConstructor::new(num_cols),
range,
_marker: PhantomData,
}
}
pub fn cartesian_coord(&self, linear_coord: usize) -> (usize, usize) {
let x = linear_coord / self.col_size;
let y = linear_coord % self.col_size;
(x, y)
}
pub fn layout(&mut self, layouter: &mut impl Layouter<F>) -> Result<(), Box<dyn Error>> {
if self.is_assigned {
return Err(Box::new(CircuitError::TableAlreadyAssigned));
}
let smallest = self.range.0;
let largest = self.range.1;
let inputs: Tensor<F> = Tensor::from(smallest..=largest).map(|x| i128_to_felt(x));
let chunked_inputs = inputs.chunks(self.col_size);
self.is_assigned = true;
let col_multipliers: Vec<F> = (0..chunked_inputs.len())
.map(|x| self.se |
lector_constructor.get_selector_val_at_idx(x))
.collect();
let _ = chunked_inputs
.enumerate()
.map(|(chunk_idx, inputs)| {
layouter.assign_table(
|| "range check table",
|mut table| {
let _ = inputs
.iter()
.enumerate()
.map(|(mut row_offset, input)| {
let col_multiplier = col_multipliers[chunk_idx];
row_offset += chunk_idx * self.col_size;
let (x, y) = self.cartesian_coord(row_offset);
table.assign_cell(
|| format!("rc_i_col row {}", row_offset),
self.inputs[x],
y,
|| Value::known(*input * col_multiplier),
)?;
Ok(())
})
.collect::<Result<Vec<()>, halo2_proofs::plonk::Error>>()?;
Ok(())
},
)
})
.collect::<Result<Vec<()>, halo2_proofs::plonk::Error>>()?;
Ok(())
}
} |
use crate::circuit::ops::poly::PolyOp;
use crate::circuit::*;
use crate::tensor::{Tensor, TensorType, ValTensor, VarTensor};
use halo2_proofs::{
circuit::{Layouter, SimpleFloorPlanner, Value},
dev::MockProver,
plonk::{Circuit, ConstraintSystem, Error},
};
use halo2curves::bn256::Fr as F;
use halo2curves::ff::{Field, PrimeField};
use ops::lookup::LookupOp;
use ops::region::RegionCtx;
use rand::rngs::OsRng;
use std::marker::PhantomData;
// Empty circuit-params marker type shared by the test circuits below.
struct TestParams;
mod matmul {
use super::*;
const K: usize = 9;
const LEN: usize = 3;
struct MatmulCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; 2],
_marker: PhantomData<F>,
}
impl Circuit<F> for MatmulCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, 1, LEN * LEN);
let b = VarTensor::new_advice(cs, K, 1, LEN * LEN);
let output = VarTensor::new_advice(cs, K, 1, LEN * LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
config
.layout(
&mut region,
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "ij,jk->ik".to_string(),
}),
)
.map_err(|_| Err |
or::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn matmulcircuit() {
let mut a =
Tensor::from((0..(LEN + 1) * LEN).map(|i| Value::known(F::from((i + 1) as u64))));
a.reshape(&[LEN, LEN + 1]).unwrap();
let mut w = Tensor::from((0..LEN + 1).map(|i| Value::known(F::from((i + 1) as u64))));
w.reshape(&[LEN + 1, 1]).unwrap();
let circuit = MatmulCircuit::<F> {
inputs: [ValTensor::from(a), ValTensor::from(w)],
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
}
}
mod matmul_col_overflow_double_col {
use super::*;
const K: usize = 5;
const LEN: usize = 6;
const NUM_INNER_COLS: usize = 2;
struct MatmulCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; 2],
_marker: PhantomData<F>,
}
impl Circuit<F> for MatmulCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, NUM_INNER_COLS, LEN * LEN * LEN);
let b = VarTensor::new_advice(cs, K, NUM_INNER_COLS, LEN * LEN * LEN);
let output = VarTensor::new_advice(cs, K, NUM_INNER_COLS, LEN * LEN * LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, NUM_INNER_COLS);
config
.layout(
&mut region,
&sel |
f.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "ij,jk->ik".to_string(),
}),
)
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn matmulcircuit() {
let mut a = Tensor::from((0..LEN * LEN).map(|i| Value::known(F::from((i + 1) as u64))));
a.reshape(&[LEN, LEN]).unwrap();
let mut w = Tensor::from((0..LEN).map(|i| Value::known(F::from((i + 1) as u64))));
w.reshape(&[LEN, 1]).unwrap();
let circuit = MatmulCircuit::<F> {
inputs: [ValTensor::from(a), ValTensor::from(w)],
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
}
}
mod matmul_col_overflow {
use super::*;
const K: usize = 5;
const LEN: usize = 6;
struct MatmulCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; 2],
_marker: PhantomData<F>,
}
impl Circuit<F> for MatmulCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
let b = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
let output = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
config
.layout(
&mut region,
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "ij,jk-> |
ik".to_string(),
}),
)
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn matmulcircuit() {
let mut a = Tensor::from((0..LEN * LEN).map(|i| Value::known(F::from((i + 1) as u64))));
a.reshape(&[LEN, LEN]).unwrap();
let mut w = Tensor::from((0..LEN).map(|i| Value::known(F::from((i + 1) as u64))));
w.reshape(&[LEN, 1]).unwrap();
let circuit = MatmulCircuit::<F> {
inputs: [ValTensor::from(a), ValTensor::from(w)],
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
}
}
mod matmul_col_ultra_overflow_double_col {
use halo2_proofs::poly::kzg::{
commitment::KZGCommitmentScheme,
multiopen::{ProverSHPLONK, VerifierSHPLONK},
strategy::SingleStrategy,
};
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use super::*;
const K: usize = 4;
const LEN: usize = 20;
const NUM_INNER_COLS: usize = 2;
struct MatmulCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; 2],
_marker: PhantomData<F>,
}
impl Circuit<F> for MatmulCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, NUM_INNER_COLS, LEN * LEN * LEN);
let b = VarTensor::new_advice(cs, K, NUM_INNER_COLS, LEN * LEN * LEN);
let output = VarTensor::new_advice(cs, K, NUM_INNER_COLS, LEN * LEN * LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| { |
let mut region = RegionCtx::new(region, 0, NUM_INNER_COLS);
config
.layout(
&mut region,
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "ij,jk->ik".to_string(),
}),
)
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn matmulcircuit() {
crate::logger::init_logger();
let mut a = Tensor::from((0..LEN * LEN).map(|i| Value::known(F::from((i + 1) as u64))));
a.reshape(&[LEN, LEN]).unwrap();
let mut w = Tensor::from((0..LEN).map(|i| Value::known(F::from((i + 1) as u64))));
w.reshape(&[LEN, 1]).unwrap();
let circuit = MatmulCircuit::<F> {
inputs: [ValTensor::from(a), ValTensor::from(w)],
_marker: PhantomData,
};
let params = crate::pfsys::srs::gen_srs::<
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<_>,
>(K as u32);
let pk = crate::pfsys::create_keys::<
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<halo2curves::bn256::Bn256>,
MatmulCircuit<F>,
>(&circuit, ¶ms, true)
.unwrap();
let prover = crate::pfsys::create_proof_circuit::<
KZGCommitmentScheme<_>,
_,
ProverSHPLONK<_>,
VerifierSHPLONK<_>,
SingleStrategy<_>,
_,
EvmTranscript<_, _, _, _>,
EvmTranscript<_, _, _, _>,
>(
circuit.clone(),
vec![],
¶ms,
&pk,
CheckMode::SAFE,
crate::Commitments::KZG,
crate::pfsys::TranscriptType::EVM,
None,
None,
);
assert!(prover.is_ok());
}
}
mod matmul_col_ultra_overflow {
use halo2_proofs::poly::kzg::{
commitment::KZGCommitmentScheme,
multiopen::{ProverSHPLONK, VerifierSHPLONK},
strategy::SingleStrategy,
};
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use super::*;
const K: usize = 4;
const LEN: usize = 20;
struct MatmulCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; 2],
_marker: PhantomData<F>,
}
impl Circuit<F> for MatmulCircuit<F> {
type Config = BaseC |
onfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
let b = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
let output = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
config
.layout(
&mut region,
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "ij,jk->ik".to_string(),
}),
)
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn matmulcircuit() {
crate::logger::init_logger();
let mut a = Tensor::from((0..LEN * LEN).map(|i| Value::known(F::from((i + 1) as u64))));
a.reshape(&[LEN, LEN]).unwrap();
let mut w = Tensor::from((0..LEN).map(|i| Value::known(F::from((i + 1) as u64))));
w.reshape(&[LEN, 1]).unwrap();
let circuit = MatmulCircuit::<F> {
inputs: [ValTensor::from(a), ValTensor::from(w)],
_marker: PhantomData,
};
let params = crate::pfsys::srs::gen_srs::<
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<_>,
>(K as u32);
let pk = crate::pfsys::create_keys::<
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<halo2curves::bn256::Bn256>,
MatmulCircuit<F>,
>(&circuit, ¶ms, true)
.unwrap();
let prover = crate::pfsys::create_proof_circuit::<
KZGCommitmentScheme<_>,
_,
ProverSHPLONK<_>,
VerifierSHPLONK<_>,
SingleStrategy<_>,
_,
EvmTranscript<_, _, _, _>,
EvmTranscript<_, _, _, _>,
>(
circuit.clone(),
vec![],
¶ms,
&pk,
CheckMode::SAFE,
crate::Commitments::KZG,
crate::pfsys::TranscriptType::EVM,
None,
None,
);
assert!(prover.is_ok());
}
}
mod dot {
use ops::poly::PolyOp;
use super::*;
const K: usize = 4;
const LEN: usize = 4;
struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; 2],
_marker: PhantomData<F>,
}
impl Circuit<F> for MyCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config { |
let a = VarTensor::new_advice(cs, K, 1, LEN);
let b = VarTensor::new_advice(cs, K, 1, LEN);
let output = VarTensor::new_advice(cs, K, 1, LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
config
.layout(
&mut region,
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "i,i->".to_string(),
}),
)
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn dotcircuit() {
let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
let b = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
let circuit = MyCircuit::<F> {
inputs: [ValTensor::from(a), ValTensor::from(b)],
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
}
}
mod dot_col_overflow_triple_col {
use super::*;
const K: usize = 4;
const LEN: usize = 50;
struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; 2],
_marker: PhantomData<F>,
}
impl Circuit<F> for MyCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let _fixed = cs.fixed_column();
cs.enable_constant(_fixed);
let a = VarTensor::new_advice(cs, K, 3, LEN);
let b = VarTensor::new_advice(cs, K, 3, LEN);
let output = VarTensor::new_advice(cs, K, 3, LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 3);
config
.layout(
&mut region,
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "i,i->".to_string(), |
}),
)
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn dotcircuit() {
let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
let b = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
let circuit = MyCircuit::<F> {
inputs: [ValTensor::from(a), ValTensor::from(b)],
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
}
}
mod dot_col_overflow {
use super::*;
const K: usize = 4;
const LEN: usize = 50;
struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; 2],
_marker: PhantomData<F>,
}
impl Circuit<F> for MyCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, 1, LEN);
let b = VarTensor::new_advice(cs, K, 1, LEN);
let output = VarTensor::new_advice(cs, K, 1, LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
config
.layout(
&mut region,
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "i,i->".to_string(),
}),
)
.map_err(|_| Error::Synthesis) |
},
)
.unwrap();
Ok(())
}
} |
fn dotcircuit() {
let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
let b = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
let circuit = MyCircuit::<F> {
inputs: [ValTensor::from(a), ValTensor::from(b)],
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
}
}
mod sum {
use super::*;
const K: usize = 4;
const LEN: usize = 4;
struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; 1],
_marker: PhantomData<F>,
}
impl Circuit<F> for MyCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, 1, LEN);
let b = VarTensor::new_advice(cs, K, 1, LEN);
let output = VarTensor::new_advice(cs, K, 1, LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
config
.layout(
&mut region,
&self.inputs.clone(),
Box::new(PolyOp::Sum { axes: vec![0] }),
)
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn sumcircuit() {
let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
let circuit = MyCircuit::<F> {
inputs: [ValTensor::from(a)],
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
}
}
mod sum_col_overflow_double_col {
use super::*;
const K: usize = 4;
const LEN: usize = 20;
const NUM_INNER_COLS: usize = 2;
struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; 1],
_marker: PhantomData<F>,
}
impl Circuit<F> for MyCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, NUM_INNER_COLS, LEN);
let b = VarTensor::new_advice(cs, K, NUM_INNER_COLS, LEN);
let output = VarTensor::new_advice(cs, K, NUM_INNER_COLS, LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, NUM_INNER_COLS);
config
.layout(
&mut region,
&self.inputs.clone(),
Box::new(PolyOp::Sum { axes: vec![0] }),
)
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn sumcircuit() {
let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
let circuit = MyCircuit::<F> {
inputs: [ValTensor::from(a)],
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
}
}
mod sum_col_overflow {
use super::*;
const K: usize = 4;
const LEN: usize = 20;
struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; 1],
_marker: PhantomData<F>,
}
impl Circuit<F> for MyCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, 1, LEN);
let b = VarTensor::new_advice(cs, K, 1, LEN);
let output = VarTensor::new_advice(cs, K, 1, LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
config
.layout(
&mut region,
&self.inputs.clone(),
Box::new(PolyOp::Sum { axes: vec![0] }),
)
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn sumcircuit() {
let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
let circuit = MyCircuit::<F> {
inputs: [ValTensor::from(a)],
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
}
}
mod composition {
use super::*;
const K: usize = 9;
const LEN: usize = 4;
struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; 2],
_marker: PhantomData<F>,
}
impl Circuit<F> for MyCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, 1, LEN);
let b = VarTensor::new_advice(cs, K, 1, LEN);
let output = VarTensor::new_advice(cs, K, 1, LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
let _ = config
.layout(
&mut region,
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "i,i->".to_string(),
}),
)
.unwrap();
let _ = config
.layout(
&mut region, |
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "i,i->".to_string(),
}),
)
.unwrap();
config
.layout(
&mut region,
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "i,i->".to_string(),
}),
)
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn dotcircuit() {
let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
let b = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
let circuit = MyCircuit::<F> {
inputs: [ValTensor::from(a), ValTensor::from(b)],
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
}
}
mod conv {
use super::*;
const K: usize = 22;
const LEN: usize = 100;
struct ConvCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: Vec<ValTensor<F>>,
_marker: PhantomData<F>,
}
impl Circuit<F> for ConvCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, 1, (LEN + 1) * LEN);
let b = VarTensor::new_advice(cs, K, 1, (LEN + 1) * LEN);
let output = VarTensor::new_advice(cs, K, 1, (LEN + 1) * LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
config
.layout(
&mut region,
&self.inputs,
Box::new(PolyOp::Conv {
padding: vec![(1, 1); 2],
stride: vec![2; 2],
}),
) |
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn convcircuit() {
let kernel_height = 2;
let kernel_width = 3;
let image_height = 5;
let image_width = 7;
let in_channels = 3;
let out_channels = 2;
let mut image =
Tensor::from((0..in_channels * image_height * image_width).map(|_| F::random(OsRng)));
image
.reshape(&[1, in_channels, image_height, image_width])
.unwrap();
image.set_visibility(&crate::graph::Visibility::Private);
let image = ValTensor::try_from(image).unwrap();
let mut kernels = Tensor::from(
(0..{ out_channels * in_channels * kernel_height * kernel_width })
.map(|_| F::random(OsRng)),
);
kernels
.reshape(&[out_channels, in_channels, kernel_height, kernel_width])
.unwrap();
kernels.set_visibility(&crate::graph::Visibility::Private);
let kernels = ValTensor::try_from(kernels).unwrap();
let mut bias = Tensor::from((0..{ out_channels }).map(|_| F::random(OsRng)));
bias.set_visibility(&crate::graph::Visibility::Private);
let bias = ValTensor::try_from(bias).unwrap();
let circuit = ConvCircuit::<F> {
inputs: [image, kernels, bias].to_vec(),
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
} |
fn convcircuitnobias() {
let kernel_height = 2;
let kernel_width = 2;
let image_height = 4;
let image_width = 5;
let in_channels = 3;
let out_channels = 2;
let mut image =
Tensor::from((0..in_channels * image_height * image_width).map(|i| F::from(i as u64)));
image
.reshape(&[1, in_channels, image_height, image_width])
.unwrap();
image.set_visibility(&crate::graph::Visibility::Private);
let mut kernels = Tensor::from(
(0..{ out_channels * in_channels * kernel_height * kernel_width })
.map(|i| F::from(i as u64)),
);
kernels
.reshape(&[out_channels, in_channels, kernel_height, kernel_width])
.unwrap();
kernels.set_visibility(&crate::graph::Visibility::Private);
let image = ValTensor::try_from(image).unwrap();
let kernels = ValTensor::try_from(kernels).unwrap();
let circuit = ConvCircuit::<F> {
inputs: [image, kernels].to_vec(),
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
}
}
mod conv_col_ultra_overflow {
use halo2_proofs::poly::{
kzg::strategy::SingleStrategy,
kzg::{
commitment::KZGCommitmentScheme,
multiopen::{ProverSHPLONK, VerifierSHPLONK},
},
};
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use super::*;
const K: usize = 4;
const LEN: usize = 28;
struct ConvCircuit<F: PrimeField + TensorType + PartialOrd> {
image: ValTensor<F>,
kernel: ValTensor<F>,
_marker: PhantomData<F>,
}
impl Circuit<F> for ConvCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn |
configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
let b = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
let output = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
config
.layout(
&mut region,
&[self.image.clone(), self.kernel.clone()],
Box::new(PolyOp::Conv {
padding: vec![(1, 1); 2],
stride: vec![2; 2],
}),
)
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn conv_circuit() {
let kernel_height = 2;
let kernel_width = 2;
let image_height = LEN;
let image_width = LEN;
let in_channels = 3;
let out_channels = 2;
crate::logger::init_logger();
let mut image =
Tensor::from((0..in_channels * image_height * image_width).map(|i| F::from(i as u64)));
image
.reshape(&[1, in_channels, image_height, image_width])
.unwrap();
image.set_visibility(&crate::graph::Visibility::Private);
let mut kernels = Tensor::from(
(0..{ out_channels * in_channels * kernel_height * kernel_width })
.map(|i| F::from(i as u64)),
);
kernels
.reshape(&[out_channels, in_channels, kernel_height, kernel_width])
.unwrap();
kernels.set_visibility(&crate::graph::Visibility::Private);
let circuit = ConvCircuit::<F> {
image: ValTensor::try_from(image).unwrap(),
kernel: ValTensor::try_from(kernels).unwrap(),
_marker: PhantomData,
};
let params = crate::pfsys::srs::gen_srs::<
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<_>,
>(K as u32);
let pk = crate::pfsys::create_keys::<
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<halo2curves::bn256::Bn256>,
ConvCircuit<F>,
>(&circuit, ¶ms, true)
.unwrap();
let prover = crate::pfsys::create_proof_circuit::<
KZGCommitmentScheme<_>,
_,
ProverSHPLONK<_>,
VerifierSHPLONK<_>,
SingleStrategy<_>,
_,
EvmTranscript<_, _, _, _>,
EvmTranscript<_, _, _, _>,
>(
circuit.clone(),
vec![],
¶ms,
&pk,
CheckMode::SAFE,
crate::Commitments::KZG,
crate::pfsys::TranscriptType::EVM,
None,
None, |
);
assert!(prover.is_ok());
}
}
mod conv_relu_col_ultra_overflow {
use halo2_proofs::poly::kzg::{
commitment::KZGCommitmentScheme,
multiopen::{ProverSHPLONK, VerifierSHPLONK},
strategy::SingleStrategy,
};
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use super::*;
const K: usize = 4;
const LEN: usize = 28;
struct ConvCircuit<F: PrimeField + TensorType + PartialOrd> {
image: ValTensor<F>,
kernel: ValTensor<F>,
_marker: PhantomData<F>,
}
impl Circuit<F> for ConvCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
let b = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
let output = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
let mut base_config =
Self::Config::configure(cs, &[a.clone(), b.clone()], &output, CheckMode::SAFE);
base_config
.configure_lookup(cs, &b, &output, &a, (-3, 3), K, &LookupOp::ReLU)
.unwrap();
base_config.clone()
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
config.layout_tables(&mut layouter).unwrap();
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
let output = config
.layout(
&mut region,
&[self.image.clone(), self.kernel.clone()], |
Box::new(PolyOp::Conv {
padding: vec![(1, 1); 2],
stride: vec![2; 2],
}),
)
.map_err(|_| Error::Synthesis);
let _output = config
.layout(
&mut region,
&[output.unwrap().unwrap()],
Box::new(LookupOp::ReLU),
)
.unwrap();
Ok(())
},
)
.unwrap();
Ok(())
}
} |
fn conv_relu_circuit() {
let kernel_height = 2;
let kernel_width = 2;
let image_height = LEN;
let image_width = LEN;
let in_channels = 3;
let out_channels = 2;
crate::logger::init_logger();
let mut image =
Tensor::from((0..in_channels * image_height * image_width).map(|_| F::from(0)));
image
.reshape(&[1, in_channels, image_height, image_width])
.unwrap();
image.set_visibility(&crate::graph::Visibility::Private);
let mut kernels = Tensor::from(
(0..{ out_channels * in_channels * kernel_height * kernel_width }).map(|_| F::from(0)),
);
kernels
.reshape(&[out_channels, in_channels, kernel_height, kernel_width])
.unwrap();
kernels.set_visibility(&crate::graph::Visibility::Private);
let circuit = ConvCircuit::<F> {
image: ValTensor::try_from(image).unwrap(),
kernel: ValTensor::try_from(kernels).unwrap(),
_marker: PhantomData,
};
let params = crate::pfsys::srs::gen_srs::<
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<_>,
>(K as u32);
let pk = crate::pfsys::create_keys::<
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<halo2curves::bn256::Bn256>,
ConvCircuit<F>,
>(&circuit, ¶ms, true)
.unwrap();
let prover = crate::pfsys::create_proof_circuit::<
KZGCommitmentScheme<_>,
_,
ProverSHPLONK<_>,
VerifierSHPLONK<_>,
SingleStrategy<_>,
_,
EvmTranscript<_, _, _, _>,
EvmTranscript<_, _, _, _>,
>(
circuit.clone(),
vec![],
¶ms,
&pk,
CheckMode::SAFE,
crate::Commitments::KZG,
crate::pfsys::TranscriptType::EVM,
None,
None,
);
assert |
!(prover.is_ok());
}
}
// Elementwise Add where the second operand is a single element, exercising
// broadcast/shape-casting in the layout.
mod add_w_shape_casting {
    use super::*;
    const K: usize = 4;
    const LEN: usize = 4;
    struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
        inputs: [ValTensor<F>; 2],
        _marker: PhantomData<F>,
    }
    impl Circuit<F> for MyCircuit<F> {
        type Config = BaseConfig<F>;
        type FloorPlanner = SimpleFloorPlanner;
        type Params = TestParams;
        fn without_witnesses(&self) -> Self {
            self.clone()
        }
        fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
            let a = VarTensor::new_advice(cs, K, 1, LEN);
            let b = VarTensor::new_advice(cs, K, 1, LEN);
            let output = VarTensor::new_advice(cs, K, 1, LEN);
            Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
        }
        fn synthesize(
            &self,
            mut config: Self::Config,
            mut layouter: impl Layouter<F>,
        ) -> Result<(), Error> {
            layouter
                .assign_region(
                    || "",
                    |region| {
                        let mut region = RegionCtx::new(region, 0, 1);
                        config
                            .layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Add))
                            .map_err(|_| Error::Synthesis)
                    },
                )
                .unwrap();
            Ok(())
        }
    }
    // NOTE(review): `#[test]` attribute appears stripped by extraction.
    fn addcircuit() {
        let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
        // Single-element tensor: broadcast against `a` during Add.
        let b = Tensor::from((0..1).map(|i| Value::known(F::from(i as u64 + 1))));
        let circuit = MyCircuit::<F> {
            inputs: [ValTensor::from(a), ValTensor::from(b)],
            _marker: PhantomData,
        };
        let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
        prover.assert_satisfied();
    }
}
// Plain elementwise Add over two equal-length tensors, mock-proved.
mod add {
    use super::*;
    const K: usize = 4;
    const LEN: usize = 4;
    struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
        inputs: [ValTensor<F>; 2],
        _marker: PhantomData<F>,
    }
    impl Circuit<F> for MyCircuit<F> {
        type Config = BaseConfig<F>;
        type FloorPlanner = SimpleFloorPlanner;
        type Params = TestParams;
        fn without_witnesses(&self) -> Self {
            self.clone()
        }
        fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
            let a = VarTensor::new_advice(cs, K, 1, LEN);
            let b = VarTensor::new_advice(cs, K, 1, LEN);
            let output = VarTensor::new_advice(cs, K, 1, LEN);
            Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
        }
        fn synthesize(
            &self,
            mut config: Self::Config,
            mut layouter: impl Layouter<F>,
        ) -> Result<(), Error> {
            layouter
                .assign_region(
                    || "",
                    |region| {
                        let mut region = RegionCtx::new(region, 0, 1);
                        config
                            .layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Add))
                            .map_err(|_| Error::Synthesis)
                    },
                )
                .unwrap();
            Ok(())
        }
    }
    // NOTE(review): `#[test]` attribute appears stripped by extraction.
    fn addcircuit() {
        let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
        let b = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
        let circuit = MyCircuit::<F> {
            inputs: [ValTensor::from(a), ValTensor::from(b)],
            _marker: PhantomData,
        };
        let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
        prover.assert_satisfied();
    }
}
mod dynamic_lookup {
use super::*;
const K: usize = 6;
const LEN: usize = 4;
const NUM_LOOP: usize = 5;
struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
tables: [[ValTensor<F>; 2]; NUM_LOOP],
lookups: [[ValTensor<F>; 2]; NUM_LOOP],
_marker: PhantomData<F>,
}
impl Circuit<F> for MyCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, 2, LEN);
let b = VarTensor::new_advice(cs, K, 2, LEN);
let c: VarTensor = VarTensor::new_advice(cs, K, 2, LEN);
let d = VarTensor::new_advice(cs, K, 1, LEN);
let e = VarTensor::new_advice(cs, K, 1, LEN);
let f: VarTensor = VarTensor::new_advice(cs, K, 1, LEN);
let _constant = VarTensor::constant_cols(cs, K, LEN * NUM_LOOP, false);
let mut config =
Self::Config::configure(cs, &[a.clone(), b.clone()], &c, CheckMode::SAFE);
config
.configure_dynamic_lookup(
cs,
&[a.clone(), b.clone(), c.clone()],
&[d.clone(), e.clone(), f.clone()],
)
.unwrap();
config
}
fn synthesize(
&self,
config: Self::Config,
mut layo |
uter: impl Layouter<F>,
) -> Result<(), Error> {
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
for i in 0..NUM_LOOP {
layouts::dynamic_lookup(
&config,
&mut region,
&self.lookups[i],
&self.tables[i],
)
.map_err(|_| Error::Synthesis)?;
}
assert_eq!(
region.dynamic_lookup_col_coord(),
NUM_LOOP * self.tables[0][0].len()
);
assert_eq!(region.dynamic_lookup_index(), NUM_LOOP);
Ok(())
},
)
.unwrap();
Ok(())
}
} |
fn dynamiclookupcircuit() {
let tables = (0..NUM_LOOP)
.map(|loop_idx| {
[
ValTensor::from(Tensor::from(
(0..LEN).map(|i| Value::known(F::from((i * loop_idx) as u64 + 1))),
)),
ValTensor::from(Tensor::from(
(0..LEN).map(|i| Value::known(F::from((loop_idx * i * i) as u64 + 1))),
)),
]
})
.collect::<Vec<_>>();
let lookups = (0..NUM_LOOP)
.map(|loop_idx| {
[
ValTensor::from(Tensor::from(
(0..3).map(|i| Value::known(F::from((i * loop_idx) as u64 + 1))),
)),
ValTensor::from(Tensor::from(
(0..3).map(|i| Value::known(F::from((loop_idx * i * i) as u64 + 1))),
)),
]
})
.collect::<Vec<_>>();
let circuit = MyCircuit::<F> {
tables: tables.clone().try_into().unwrap(),
lookups: lookups.try_into().unwrap(),
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
let lookups = (0..NUM_LOOP)
.map(|loop_idx| {
let prev_idx = if loop_idx == 0 {
NUM_LOOP - 1
} else {
loop_idx - 1
};
[
ValTensor::from(Tensor::from(
(0..3).map(|i| Value::known(F::from((i * prev_idx) as u64 + 1))),
)),
ValTensor::from(Tensor::from(
(0..3).map(|i| Value::known(F::from((prev_idx * i * i) as u64 + 1))),
)),
]
})
.collect::<Vec<_>>();
let circuit = MyCircuit::<F> {
tables: tables.try_into().unwrap(), |
lookups: lookups.try_into().unwrap(),
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
assert!(prover.verify().is_err());
}
}
// Shuffle argument: each input must be a permutation of its reference.
// Inputs are the references reversed (a valid permutation); the second run
// pairs inputs with the wrong reference and must fail verification.
mod shuffle {
    use super::*;
    const K: usize = 6;
    const LEN: usize = 4;
    const NUM_LOOP: usize = 5;
    struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
        inputs: [[ValTensor<F>; 1]; NUM_LOOP],
        references: [[ValTensor<F>; 1]; NUM_LOOP],
        _marker: PhantomData<F>,
    }
    impl Circuit<F> for MyCircuit<F> {
        type Config = BaseConfig<F>;
        type FloorPlanner = SimpleFloorPlanner;
        type Params = TestParams;
        fn without_witnesses(&self) -> Self {
            self.clone()
        }
        fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
            let a = VarTensor::new_advice(cs, K, 2, LEN);
            let b = VarTensor::new_advice(cs, K, 2, LEN);
            let c: VarTensor = VarTensor::new_advice(cs, K, 2, LEN);
            let d = VarTensor::new_advice(cs, K, 1, LEN);
            let e = VarTensor::new_advice(cs, K, 1, LEN);
            let _constant = VarTensor::constant_cols(cs, K, LEN * NUM_LOOP, false);
            let mut config =
                Self::Config::configure(cs, &[a.clone(), b.clone()], &c, CheckMode::SAFE);
            config
                .configure_shuffles(cs, &[a.clone(), b.clone()], &[d.clone(), e.clone()])
                .unwrap();
            config
        }
        fn synthesize(
            &self,
            config: Self::Config,
            mut layouter: impl Layouter<F>,
        ) -> Result<(), Error> {
            layouter
                .assign_region(
                    || "",
                    |region| {
                        let mut region = RegionCtx::new(region, 0, 1);
                        for i in 0..NUM_LOOP {
                            layouts::shuffles(
                                &config,
                                &mut region,
                                &self.inputs[i],
                                &self.references[i],
                            )
                            .map_err(|_| Error::Synthesis)?;
                        }
                        // Each loop advances the shuffle coordinate by one reference length.
                        assert_eq!(
                            region.shuffle_col_coord(),
                            NUM_LOOP * self.references[0][0].len()
                        );
                        assert_eq!(region.shuffle_index(), NUM_LOOP);
                        Ok(())
                    },
                )
                .unwrap();
            Ok(())
        }
    }
    // NOTE(review): `#[test]` attribute appears stripped by extraction.
    fn shufflecircuit() {
        let references = (0..NUM_LOOP)
            .map(|loop_idx| {
                [ValTensor::from(Tensor::from((0..LEN).map(|i| {
                    Value::known(F::from((i * loop_idx) as u64 + 1))
                })))]
            })
            .collect::<Vec<_>>();
        let inputs = (0..NUM_LOOP)
            .map(|loop_idx| {
                [ValTensor::from(Tensor::from((0..LEN).rev().map(|i| {
                    Value::known(F::from((i * loop_idx) as u64 + 1))
                })))]
            })
            .collect::<Vec<_>>();
        let circuit = MyCircuit::<F> {
            references: references.clone().try_into().unwrap(),
            inputs: inputs.try_into().unwrap(),
            _marker: PhantomData,
        };
        let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
        prover.assert_satisfied();
        // Negative case: inputs built from the previous loop's reference values.
        let inputs = (0..NUM_LOOP)
            .map(|loop_idx| {
                let prev_idx = if loop_idx == 0 {
                    NUM_LOOP - 1
                } else {
                    loop_idx - 1
                };
                [ValTensor::from(Tensor::from((0..LEN).rev().map(|i| {
                    Value::known(F::from((i * prev_idx) as u64 + 1))
                })))]
            })
            .collect::<Vec<_>>();
        let circuit = MyCircuit::<F> {
            references: references.try_into().unwrap(),
            inputs: inputs.try_into().unwrap(),
            _marker: PhantomData,
        };
        let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
        assert!(prover.verify().is_err());
    }
}
// Elementwise Add where LEN (50) exceeds what a single column holds at K=4,
// exercising column-overflow in the base config.
mod add_with_overflow {
    use super::*;
    const K: usize = 4;
    const LEN: usize = 50;
    struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
        inputs: [ValTensor<F>; 2],
        _marker: PhantomData<F>,
    }
    impl Circuit<F> for MyCircuit<F> {
        type Config = BaseConfig<F>;
        type FloorPlanner = SimpleFloorPlanner;
        type Params = TestParams;
        fn without_witnesses(&self) -> Self {
            self.clone()
        }
        fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
            let a = VarTensor::new_advice(cs, K, 1, LEN);
            let b = VarTensor::new_advice(cs, K, 1, LEN);
            let output = VarTensor::new_advice(cs, K, 1, LEN);
            Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
        }
        fn synthesize(
            &self,
            mut config: Self::Config,
            mut layouter: impl Layouter<F>,
        ) -> Result<(), Error> {
            layouter
                .assign_region(
                    || "",
                    |region| {
                        let mut region = RegionCtx::new(region, 0, 1);
                        config
                            .layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Add))
                            .map_err(|_| Error::Synthesis)
                    },
                )
                .unwrap();
            Ok(())
        }
    }
    // NOTE(review): `#[test]` attribute appears stripped by extraction.
    fn addcircuit() {
        let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
        let b = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
        let circuit = MyCircuit::<F> {
            inputs: [ValTensor::from(a), ValTensor::from(b)],
            _marker: PhantomData,
        };
        let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
        prover.assert_satisfied();
    }
}
// Add circuit whose two inputs are additionally committed via Poseidon;
// the hashes are exposed as public instances and must match the witnesses.
mod add_with_overflow_and_poseidon {
    use std::collections::HashMap;
    use halo2curves::bn256::Fr;
    use crate::circuit::modules::{
        poseidon::{
            spec::{PoseidonSpec, POSEIDON_RATE, POSEIDON_WIDTH},
            PoseidonChip, PoseidonConfig,
        },
        Module, ModulePlanner,
    };
    use super::*;
    const K: usize = 15;
    const LEN: usize = 50;
    const WIDTH: usize = POSEIDON_WIDTH;
    const RATE: usize = POSEIDON_RATE;
    // Combined config: base arithmetic config plus the Poseidon module config.
    struct MyCircuitConfig {
        base: BaseConfig<Fr>,
        poseidon: PoseidonConfig<WIDTH, RATE>,
    }
    struct MyCircuit {
        inputs: [ValTensor<Fr>; 2],
    }
    impl Circuit<Fr> for MyCircuit {
        type Config = MyCircuitConfig;
        type FloorPlanner = ModulePlanner;
        type Params = TestParams;
        fn without_witnesses(&self) -> Self {
            self.clone()
        }
        fn configure(cs: &mut ConstraintSystem<Fr>) -> Self::Config {
            let a = VarTensor::new_advice(cs, K, 1, LEN);
            let b = VarTensor::new_advice(cs, K, 1, LEN);
            let output = VarTensor::new_advice(cs, K, 1, LEN);
            let base = BaseConfig::configure(cs, &[a, b], &output, CheckMode::SAFE);
            VarTensor::constant_cols(cs, K, 2, false);
            let poseidon = PoseidonChip::<PoseidonSpec, WIDTH, RATE, WIDTH>::configure(cs, ());
            MyCircuitConfig { base, poseidon }
        }
        fn synthesize(
            &self,
            mut config: Self::Config,
            mut layouter: impl Layouter<Fr>,
        ) -> Result<(), Error> {
            let poseidon_chip: PoseidonChip<PoseidonSpec, WIDTH, RATE, WIDTH> =
                PoseidonChip::new(config.poseidon.clone());
            // Hash each input against its own instance offset (0 and 1).
            let assigned_inputs_a =
                poseidon_chip.layout(&mut layouter, &self.inputs[0..1], 0, &mut HashMap::new())?;
            let assigned_inputs_b =
                poseidon_chip.layout(&mut layouter, &self.inputs[1..2], 1, &mut HashMap::new())?;
            // Marker region telling the ModulePlanner a new module starts here.
            layouter.assign_region(|| "_new_module", |_| Ok(()))?;
            let inputs = vec![assigned_inputs_a, assigned_inputs_b];
            layouter.assign_region(
                || "model",
                |region| {
                    let mut region = RegionCtx::new(region, 0, 1);
                    config
                        .base
                        .layout(&mut region, &inputs, Box::new(PolyOp::Add))
                        .map_err(|_| Error::Synthesis)
                },
            )?;
            Ok(())
        }
    }
    // NOTE(review): `#[test]` attribute appears stripped by extraction.
    fn addcircuit() {
        let a = (0..LEN)
            .map(|i| halo2curves::bn256::Fr::from(i as u64 + 1))
            .collect::<Vec<_>>();
        let b = (0..LEN)
            .map(|i| halo2curves::bn256::Fr::from(i as u64 + 1))
            .collect::<Vec<_>>();
        // Out-of-circuit Poseidon hashes become the public instance values.
        let commitment_a =
            PoseidonChip::<PoseidonSpec, WIDTH, RATE, WIDTH>::run(a.clone()).unwrap()[0][0];
        let commitment_b =
            PoseidonChip::<PoseidonSpec, WIDTH, RATE, WIDTH>::run(b.clone()).unwrap()[0][0];
        let a = Tensor::from(a.into_iter().map(Value::known));
        let b = Tensor::from(b.into_iter().map(Value::known));
        let circuit = MyCircuit {
            inputs: [ValTensor::from(a), ValTensor::from(b)],
        };
        let prover =
            MockProver::run(K as u32, &circuit, vec![vec![commitment_a, commitment_b]]).unwrap();
        prover.assert_satisfied();
    }
    // Negative case: instance hashes offset by one must fail verification.
    fn addcircuit_bad_hashes() {
        let a = (0..LEN)
            .map(|i| halo2curves::bn256::Fr::from(i as u64 + 1))
            .collect::<Vec<_>>();
        let b = (0..LEN)
            .map(|i| halo2curves::bn256::Fr::from(i as u64 + 1))
            .collect::<Vec<_>>();
        let commitment_a = PoseidonChip::<PoseidonSpec, WIDTH, RATE, WIDTH>::run(a.clone())
            .unwrap()[0][0]
            + Fr::one();
        let commitment_b = PoseidonChip::<PoseidonSpec, WIDTH, RATE, WIDTH>::run(b.clone())
            .unwrap()[0][0]
            + Fr::one();
        let a = Tensor::from(a.into_iter().map(Value::known));
        let b = Tensor::from(b.into_iter().map(Value::known));
        let circuit = MyCircuit {
            inputs: [ValTensor::from(a), ValTensor::from(b)],
        };
        let prover =
            MockProver::run(K as u32, &circuit, vec![vec![commitment_a, commitment_b]]).unwrap();
        assert!(prover.verify().is_err());
    }
}
// Elementwise Sub over two equal tensors (result is all zeros), mock-proved.
mod sub {
    use super::*;
    const K: usize = 4;
    const LEN: usize = 4;
    struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
        inputs: [ValTensor<F>; 2],
        _marker: PhantomData<F>,
    }
    impl Circuit<F> for MyCircuit<F> {
        type Config = BaseConfig<F>;
        type FloorPlanner = SimpleFloorPlanner;
        type Params = TestParams;
        fn without_witnesses(&self) -> Self {
            self.clone()
        }
        fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
            let a = VarTensor::new_advice(cs, K, 1, LEN);
            let b = VarTensor::new_advice(cs, K, 1, LEN);
            let output = VarTensor::new_advice(cs, K, 1, LEN);
            Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
        }
        fn synthesize(
            &self,
            mut config: Self::Config,
            mut layouter: impl Layouter<F>,
        ) -> Result<(), Error> {
            layouter
                .assign_region(
                    || "",
                    |region
                    | {
                        let mut region = RegionCtx::new(region, 0, 1);
                        config
                            .layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Sub))
                            .map_err(|_| Error::Synthesis)
                    },
                )
                .unwrap();
            Ok(())
        }
    }
    // NOTE(review): `#[test]` attribute appears stripped by extraction.
    fn subcircuit() {
        let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
        let b = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
        let circuit = MyCircuit::<F> {
            inputs: [ValTensor::from(a), ValTensor::from(b)],
            _marker: PhantomData,
        };
        let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
        prover.assert_satisfied();
    }
}
// Elementwise Mult over two equal-length tensors, mock-proved.
mod mult {
    use super::*;
    const K: usize = 4;
    const LEN: usize = 4;
    struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
        inputs: [ValTensor<F>; 2],
        _marker: PhantomData<F>,
    }
    impl Circuit<F> for MyCircuit<F> {
        type Config = BaseConfig<F>;
        type FloorPlanner = SimpleFloorPlanner;
        type Params = TestParams;
        fn without_witnesses(&self) -> Self {
            self.clone()
        }
        fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
            let a = VarTensor::new_advice(cs, K, 1, LEN);
            let b = VarTensor::new_advice(cs, K, 1, LEN);
            let output = VarTensor::new_advice(cs, K, 1, LEN);
            Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
        }
        fn synthesize(
            &self,
            mut config: Self::Config,
            mut layouter: impl Layouter<F>,
        ) -> Result<(), Error> {
            layouter
                .assign_region(
                    || "",
                    |region| {
                        let mut region = RegionCtx::new(region, 0, 1);
                        config
                            .layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Mult))
                            .map_err(|_| Error::Synthesis)
                    },
                )
                .unwrap();
            Ok(())
        }
    }
    // NOTE(review): `#[test]` attribute appears stripped by extraction.
    fn multcircuit() {
        let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
        let b = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
        let circuit = MyCircuit::<F> {
            inputs: [ValTensor::from(a), ValTensor::from(b)],
            _marker: PhantomData,
        };
        let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
        prover.assert_satisfied();
    }
}
// Fifth-power circuit (PolyOp::Pow(5)); K=8 leaves room for the larger values.
mod pow {
    use super::*;
    const K: usize = 8;
    const LEN: usize = 4;
    struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
        inputs: [ValTensor<F>; 1],
        _marker: PhantomData<F>,
    }
    impl Circuit<F> for MyCircuit<F> {
        type Config = BaseConfig<F>;
        type FloorPlanner = SimpleFloorPlanner;
        type Params = TestParams;
        fn without_witnesses(&self) -> Self {
            self.clone()
        }
        fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
            let a = VarTensor::new_advice(cs, K, 1, LEN);
            let b = VarTensor::new_advice(cs, K, 1, LEN);
            let output = VarTensor::new_advice(cs, K, 1, LEN);
            Self::Config::configure(cs, &[a, b], &output, CheckMode::SAFE)
        }
        fn synthesize(
            &self,
            mut config: Self::Config,
            mut layouter: impl Layouter<F>,
        ) -> Result<(), Error> {
            layouter
                .assign_region(
                    || "",
                    |region| {
                        let mut region = RegionCtx::new(region, 0, 1);
                        config
                            .layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Pow(5)))
                            .map_err(|_| Error::Synthesis)
                    },
                )
                .unwrap();
            Ok(())
        }
    }
    // NOTE(review): `#[test]` attribute appears stripped by extraction.
    fn powcircuit() {
        let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
        let circuit = MyCircuit::<F> {
            inputs: [ValTensor::from(a)],
            _marker: PhantomData,
        };
        let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
        prover.assert_satisfied();
    }
}
mod matmul_relu {
use super::*;
const K: usize = 18;
const LEN: usize = 32;
use crate::circuit::lookup::LookupOp;
struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; 2],
_marker: PhantomData<F>,
}
struct MyConfig<F: PrimeField + TensorType + PartialOrd> {
base_config: BaseConfig<F>,
}
impl Circuit<F> for MyCircuit<F> {
type Config = MyConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let a = VarTensor::new_advice(cs, K, 1, LEN);
let b = VarTensor::new_advice(cs, K, 1, LEN);
let output = VarTensor::new_advice(cs, K, 1, LEN);
let mut base_config =
BaseConfig::configure(cs, &[a.clone(), b.clone()], &output, CheckMode::SAFE);
base_config
.configure_lookup(cs, &b, &output, &a, (-32768, 32768), K, &LookupOp::ReLU)
.unwrap();
MyConfig { base_config }
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
config.base_config.layout_tables(&mut layouter).unwrap();
layouter.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
let op = PolyOp::Einsum {
equation: "ij,jk->ik".to_ |
string(),
};
let output = config
.base_config
.layout(&mut region, &self.inputs, Box::new(op))
.unwrap();
let _output = config
.base_config
.layout(&mut region, &[output.unwrap()], Box::new(LookupOp::ReLU))
.unwrap();
Ok(())
},
)?;
Ok(())
}
} |
fn matmulrelucircuit() {
let mut a = Tensor::from((0..LEN * LEN).map(|_| Value::known(F::from(1))));
a.reshape(&[LEN, LEN]).unwrap();
let mut b = Tensor::from((0..LEN).map(|_| Value::known(F::from(1))));
b.reshape(&[LEN, 1]).unwrap();
let circuit = MyCircuit {
inputs: [ValTensor::from(a), ValTensor::from(b)],
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
}
}
mod relu {
use super::*;
use halo2_proofs::{
circuit::{Layouter, SimpleFloorPlanner, Value},
dev::MockProver,
plonk::{Circuit, ConstraintSystem, Error},
};
struct ReLUCircuit<F: PrimeField + TensorType + PartialOrd> {
pub input: ValTensor<F>,
}
impl Circuit<F> for ReLUCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let advices = (0..3)
.map(|_| VarTensor::new_advice(cs, 4, 1, 3))
.collect::<Vec<_>>();
let nl = LookupOp::ReLU;
let mut config = BaseConfig::default();
config
.configure_lookup(cs, &advices[0], &advices[1], &advices[2], (-6, 6), 4, &nl)
.unwrap();
config
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
config.layout_tables(&mut layouter).unwrap();
layouter
.assign_region(
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
config
.layout(&mut region, &[self.inpu |
t.clone()], Box::new(LookupOp::ReLU))
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn relucircuit() {
let input: Tensor<Value<F>> =
Tensor::new(Some(&[Value::<F>::known(F::from(1_u64)); 4]), &[4]).unwrap();
let circuit = ReLUCircuit::<F> {
input: ValTensor::from(input),
};
let prover = MockProver::run(4_u32, &circuit, vec![]).unwrap();
prover.assert_satisfied();
}
}
mod lookup_ultra_overflow {
use super::*;
use halo2_proofs::{
circuit::{Layouter, SimpleFloorPlanner, Value},
plonk::{Circuit, ConstraintSystem, Error},
poly::kzg::{
commitment::KZGCommitmentScheme,
multiopen::{ProverSHPLONK, VerifierSHPLONK},
strategy::SingleStrategy,
},
};
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
struct ReLUCircuit<F: PrimeField + TensorType + PartialOrd> {
pub input: ValTensor<F>,
}
impl Circuit<F> for ReLUCircuit<F> {
type Config = BaseConfig<F>;
type FloorPlanner = SimpleFloorPlanner;
type Params = TestParams;
fn without_witnesses(&self) -> Self {
self.clone()
}
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
let advices = (0..3)
.map(|_| VarTensor::new_advice(cs, 4, 1, 3))
.collect::<Vec<_>>();
let nl = LookupOp::ReLU;
let mut config = BaseConfig::default();
config
.configure_lookup(
cs,
&advices[0],
&advices[1],
&advices[2],
(-1024, 1024),
4,
&nl,
)
.unwrap();
config
}
fn synthesize(
&self,
mut config: Self::Config,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
config.layout_tables(&mut layouter).unwrap();
layouter
.assign_region( |
|| "",
|region| {
let mut region = RegionCtx::new(region, 0, 1);
config
.layout(&mut region, &[self.input.clone()], Box::new(LookupOp::ReLU))
.map_err(|_| Error::Synthesis)
},
)
.unwrap();
Ok(())
}
} |
fn relucircuit() {
crate::logger::init_logger();
let a = Tensor::from((0..4).map(|i| Value::known(F::from(i + 1))));
let circuit = ReLUCircuit::<F> {
input: ValTensor::from(a),
};
let params = crate::pfsys::srs::gen_srs::<
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<_>,
>(4_u32);
let pk = crate::pfsys::create_keys::<
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<halo2curves::bn256::Bn256>,
ReLUCircuit<F>,
>(&circuit, ¶ms, true)
.unwrap();
let prover = crate::pfsys::create_proof_circuit::<
KZGCommitmentScheme<_>,
_,
ProverSHPLONK<_>,
VerifierSHPLONK<_>,
SingleStrategy<_>,
_,
EvmTranscript<_, _, _, _>,
EvmTranscript<_, _, _, _>,
>(
circuit.clone(),
vec![],
¶ms,
&pk,
CheckMode::SAFE,
crate::Commitments::KZG,
crate::pfsys::TranscriptType::EVM,
None,
None,
);
assert!(prover.is_ok());
}
} |
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
/// Newtype around `f32` that provides `Eq`, `Ord`, and `Hash` consistent with
/// each other (NaN == NaN, 0.0 == -0.0), via the impls below.
pub
struct F32(pub f32);
impl<'de> Deserialize<'de> for F32 {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let float = f32::deserialize(deserializer)?;
Ok(F32(float))
}
}
impl Serialize for F32 {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
f32::serialize(&self.0, serializer)
}
}
impl PartialEq for F32 {
    /// Equality where `NaN == NaN` holds (unlike raw `f32`), making `Eq` sound.
    /// `0.0 == -0.0` follows from the underlying float comparison.
    fn eq(&self, other: &Self) -> bool {
        (self.0.is_nan() && other.0.is_nan()) || self.0 == other.0
    }
}
// Sound because the PartialEq impl above makes equality reflexive (NaN == NaN).
impl Eq for F32 {}
impl PartialOrd for F32 {
    /// Delegates to the total order defined by `Ord` below.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(Ord::cmp(self, other))
    }
}
impl Ord for F32 {
    /// Total order over `f32`: NaN sorts below every non-NaN value and compares
    /// equal to itself; all other values use the usual float ordering.
    fn cmp(&self, other: &Self) -> Ordering {
        match self.0.partial_cmp(&other.0) {
            Some(ordering) => ordering,
            // `partial_cmp` is `None` only when at least one side is NaN.
            None => match (self.0.is_nan(), other.0.is_nan()) {
                (true, false) => Ordering::Less,
                (false, true) => Ordering::Greater,
                _ => Ordering::Equal,
            },
        }
    }
}
impl Hash for F32 {
    /// Hash consistent with `PartialEq`: every NaN collapses to one canonical
    /// bit pattern and `0.0` / `-0.0` hash identically.
    fn hash<H: Hasher>(&self, state: &mut H) {
        let bits = if self.0.is_nan() {
            // canonical quiet-NaN payload
            0x7fc00000u32
        } else if self.0 == 0.0 {
            // covers both signed zeros
            0u32
        } else {
            self.0.to_bits()
        };
        bits.hash(state);
    }
}
impl From<F32> for f32 {
fn from(f: F32) -> Self {
f.0
}
}
impl From<f32> for F32 {
fn from(f: f32) -> Self {
F32(f)
}
}
impl From<f64> for F32 {
fn from(f: f64) -> Self {
F32(f as f32)
}
}
impl From<usize> for F32 {
fn from(f: usize) -> Self {
F32(f as f32)
}
}
impl From<F32> for f64 {
fn from(f: F32) -> Self {
f.0 as f64
}
}
impl From<&F32> for f64 {
fn from(f: &F32) -> Self {
f.0 as f64
}
}
impl fmt::Display for F32 {
fn fmt(&self, f: &mut fmt::Forma |
tter) -> fmt::Result {
self.0.fmt(f)
}
}
// Unit tests for the F32 equality / ordering / hashing invariants.
// NOTE(review): `#[cfg(test)]` / `#[test]` attributes appear stripped by extraction.
mod tests {
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};
    use super::F32;
    // Helper: hash a value with the default hasher and return the digest.
    fn calculate_hash<T: Hash>(t: &T) -> u64 {
        let mut s = DefaultHasher::new();
        t.hash(&mut s);
        s.finish()
    }
    fn f32_eq() {
        assert!(F32(std::f32::NAN) == F32(std::f32::NAN));
        assert!(F32(std::f32::NAN) != F32(5.0));
        assert!(F32(5.0) != F32(std::f32::NAN));
        assert!(F32(0.0) == F32(-0.0));
    }
    fn f32_cmp() {
        assert!(F32(std::f32::NAN) == F32(std::f32::NAN));
        // NaN sorts below every non-NaN value under the total order.
        assert!(F32(std::f32::NAN) < F32(5.0));
        assert!(F32(5.0) > F32(std::f32::NAN));
        assert!(F32(0.0) == F32(-0.0));
    }
    fn f32_hash() {
        // Hash must agree with PartialEq: signed zeros and all NaNs collide.
        assert!(calculate_hash(&F32(0.0)) == calculate_hash(&F32(-0.0)));
        assert!(calculate_hash(&F32(std::f32::NAN)) == calculate_hash(&F32(-std::f32::NAN)));
    }
}
use clap::{Parser, Subcommand};
use ethers::types::H160;
use pyo3::{
conversion::{FromPyObject, PyTryFrom},
exceptions::PyValueError,
prelude::*,
types::PyString,
};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::{error::Error, str::FromStr};
use tosubcommand::{ToFlags, ToSubcommand};
use crate::{pfsys::ProofType, Commitments, RunArgs};
use crate::circuit::CheckMode;
use crate::graph::TestDataSource;
use crate::pfsys::TranscriptType;
// Default artifact paths for the CLI.
pub const DEFAULT_DATA: &str = "input.json";
pub const DEFAULT_MODEL: &str = "network.onnx";
pub const DEFAULT_COMPILED_CIRCUIT: &str = "model.compiled";
pub const DEFAULT_WITNESS: &str = "witness.json";
pub const DEFAULT_SETTINGS: &str = "settings.json";
pub const DEFAULT_PK: &str = "pk.key";
pub const DEFAULT_VK: &str = "vk.key";
pub const DEFAULT_PK_AGGREGATED: &str = "pk_aggr.key";
pub const DEFAULT_VK_AGGREGATED: &str = "vk_aggr.key";
pub const DEFAULT_PROOF: &str = "proof.json";
pub const DEFAULT_PROOF_AGGREGATED: &str = "proof_aggr.json";
pub const DEFAULT_SPLIT: &str = "false";
// Default EVM verifier artifacts (ABI, Solidity, deployed addresses).
pub const DEFAULT_VERIFIER_ABI: &str = "verifier_abi.json";
pub const DEFAULT_VERIFIER_AGGREGATED_ABI: &str = "verifier_aggr_abi.json";
pub const DEFAULT_VERIFIER_DA_ABI: &str = "verifier_da_abi.json";
pub const DEFAULT_SOL_CODE: &str = "evm_deploy.sol";
pub const DEFAULT_SOL_CODE_AGGREGATED: &str = "evm_deploy_aggr.sol";
pub const DEFAULT_SOL_CODE_DA: &str = "evm_deploy_da.sol";
pub const DEFAULT_CONTRACT_ADDRESS: &str = "contract.address";
pub const DEFAULT_CONTRACT_ADDRESS_DA: &str = "contract_da.address";
pub const DEFAULT_CONTRACT_ADDRESS_VK: &str = "contract_vk.address";
// Default knob values; kept as strings because they feed clap defaults.
pub const DEFAULT_CHECKMODE: &str = "safe";
pub const DEFAULT_CALIBRATION_TARGET: &str = "resources";
pub const DEFAULT_AGGREGATED_LOGROWS: &str = "23";
pub const DEFAULT_OPTIMIZER_RUNS: &str = "1";
pub const DEFAULT_FUZZ_RUNS: &str = "10";
pub const DEFAULT_CALIBRATION_FILE: &str = "calibration.json";
pub const DEFAULT_LOOKUP_SAFETY_MARGIN: &str = "2";
pub
const DEFAULT_DISABLE_SELECTOR_COMPRESSION: &str = "false";
// NOTE(review): "SEPERATELY" misspelling kept — renaming the const would break callers.
pub const DEFAULT_RENDER_VK_SEPERATELY: &str = "false";
pub const DEFAULT_VK_SOL: &str = "vk.sol";
pub const DEFAULT_VK_ABI: &str = "vk.abi";
pub const DEFAULT_SCALE_REBASE_MULTIPLIERS: &str = "1,2,10";
pub const DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION: &str = "false";
pub const DEFAULT_ONLY_RANGE_CHECK_REBASE: &str = "false";
pub const DEFAULT_COMMITMENT: &str = "kzg";
// Python conversion: TranscriptType -> lowercase string ("poseidon" / "evm").
impl IntoPy<PyObject> for TranscriptType {
    fn into_py(self, py: Python) -> PyObject {
        match self {
            TranscriptType::Poseidon => "poseidon".to_object(py),
            TranscriptType::EVM => "evm".to_object(py),
        }
    }
}
// Python conversion: case-insensitive string -> TranscriptType; anything else
// raises a Python ValueError.
impl<'source> FromPyObject<'source> for TranscriptType {
    fn extract(ob: &'source PyAny) -> PyResult<Self> {
        let trystr = <PyString as PyTryFrom>::try_from(ob)?;
        let strval = trystr.to_string();
        match strval.to_lowercase().as_str() {
            "poseidon" => Ok(TranscriptType::Poseidon),
            "evm" => Ok(TranscriptType::EVM),
            _ => Err(PyValueError::new_err("Invalid value for TranscriptType")),
        }
    }
}
/// What the calibration search optimizes for.
pub enum CalibrationTarget {
    /// Minimize prover resource usage; `col_overflow` permits column overflow.
    Resources {
        col_overflow: bool,
    },
    /// Maximize numerical accuracy.
    Accuracy,
}
// Default matches DEFAULT_CALIBRATION_TARGET ("resources", no overflow).
impl Default for CalibrationTarget {
    fn default() -> Self {
        CalibrationTarget::Resources {
            col_overflow: false,
        }
    }
}
impl std::fmt::Display for CalibrationTarget {
    /// Renders the target in the same kebab-case form `From<&str>` parses.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            CalibrationTarget::Resources { col_overflow: true } => "resources/col-overflow",
            CalibrationTarget::Resources {
                col_overflow: false,
            } => "resources",
            CalibrationTarget::Accuracy => "accuracy",
        };
        write!(f, "{}", label)
    }
}
impl ToFlags for CalibrationTarget |
{
fn to_flags(&self) -> Vec<String> {
vec![format!("{}", self)]
}
}
impl From<&str> for CalibrationTarget {
    /// Parse the CLI string form; unknown values fall back to the default
    /// (with a logged error) instead of aborting.
    fn from(s: &str) -> Self {
        match s {
            "resources" => CalibrationTarget::Resources {
                col_overflow: false,
            },
            "resources/col-overflow" => CalibrationTarget::Resources { col_overflow: true },
            "accuracy" => CalibrationTarget::Accuracy,
            _ => {
                log::error!("Invalid value for CalibrationTarget");
                log::warn!("Defaulting to resources");
                CalibrationTarget::default()
            }
        }
    }
}
/// Newtype wrapper so an `H160` Ethereum address can round-trip through a
/// CLI flag.
pub
struct H160Flag {
    // The wrapped 160-bit address.
    inner: H160,
}
impl From<H160Flag> for H160 {
fn from(val: H160Flag) -> H160 {
val.inner
}
}
impl ToFlags for H160Flag {
fn to_flags(&self) -> Vec<String> {
vec![format!("{:
}
}
impl From<&str> for H160Flag {
    /// Parse a hex string into an `H160Flag`.
    ///
    /// # Panics
    /// Panics when `s` is not a valid H160 hex address. `From` cannot return
    /// an error; `expect` gives a diagnosable message instead of a bare unwrap.
    fn from(s: &str) -> Self {
        Self {
            inner: H160::from_str(s).expect("invalid H160 address supplied as flag value"),
        }
    }
}
impl IntoPy<PyObject> for CalibrationTarget {
    /// Convert the target into its Python string representation.
    fn into_py(self, py: Python) -> PyObject {
        let label = match self {
            CalibrationTarget::Resources { col_overflow: true } => "resources/col-overflow",
            CalibrationTarget::Resources {
                col_overflow: false,
            } => "resources",
            CalibrationTarget::Accuracy => "accuracy",
        };
        label.to_object(py)
    }
}
impl<'source> FromPyObject<'source> for CalibrationTarget {
    /// Parse a Python string (case-insensitive) into a `CalibrationTarget`.
    fn extract(ob: &'source PyAny) -> PyResult<Self> {
        let text = <PyString as PyTryFrom>::try_from(ob)?.to_string();
        match text.to_lowercase().as_str() {
            "resources" => Ok(CalibrationTarget::Resources {
                col_overflow: false,
            }),
            "resources/col-overflow" => Ok(CalibrationTarget::Resources { col_overflow: true }),
            "accuracy" => Ok(CalibrationTarget::Accuracy),
            _ => Err(PyValueError::new_err("Invalid value for CalibrationTarget")),
        }
    }
}
use lazy_static::lazy_static;
lazy_static! {
    /// Crate version reported to users. A "0.0.0" Cargo version means a
    /// from-source build, for which no compatibility is guaranteed.
    pub static ref VERSION: &'static str = if env!("CARGO_PKG_VERSION") == "0.0.0" {
        "source - no compatibility guaranteed"
    } else {
        env!("CARGO_PKG_VERSION")
    };
}
/// Top-level CLI invocation.
/// NOTE(review): clap derive attributes appear to have been stripped from
/// this copy of the source — confirm against upstream.
pub
struct Cli {
    // The subcommand to execute.
    pub command: Commands,
}
impl Cli {
    /// Serialize this CLI invocation to a JSON string.
    pub fn as_json(&self) -> Result<String, Box<dyn Error>> {
        // `?` + the `From<serde_json::Error>` impl replace the manual
        // match-and-box re-wrap of the original.
        Ok(serde_json::to_string(&self)?)
    }
    /// Reconstruct a CLI invocation from its JSON form.
    pub fn from_json(arg_json: &str) -> Result<Self, serde_json::Error> {
        serde_json::from_str(arg_json)
    }
}
pub enum Commands {
Empty,
Table {
model: PathBuf,
args: RunArgs,
},
GenWitness {
data: PathBuf,
compiled_circuit: PathBuf,
output: PathBuf,
vk_path: Option<PathBuf>,
srs_path: Option<PathBuf>,
},
GenSettings {
model: PathBuf,
settings_path: PathBuf,
args: RunArgs,
},
CalibrateSettings {
data: PathBuf,
model: PathBuf,
settings_path: PathBuf,
target: CalibrationTarget,
lookup_safety_margin: i128,
scales: Option<Vec<crate::Scale>>,
long,
value_delimiter = ',',
allow_hyphen_values = true,
default_value = DEFAULT_SCALE_REBASE_MULTIPLIERS
)]
scale_rebase_multiplier: Vec<u32>,
max_logrows: Option<u32>,
only_range_check_rebase: bool,
},
GenSrs {
srs_path: PathBuf,
logrows: usize,
commitment: Commitments,
},
GetSrs {
srs_path: Option<PathBuf>,
settings_path: Option<PathBuf>, |
logrows: Option<u32>,
commitment: Option<Commitments>,
},
Mock {
witness: PathBuf,
model: PathBuf,
},
MockAggregate {
aggregation_snarks: Vec<PathBuf>,
logrows: u32,
split_proofs: bool,
},
SetupAggregate {
sample_snarks: Vec<PathBuf>,
vk_path: PathBuf,
pk_path: PathBuf,
srs_path: Option<PathBuf>,
logrows: u32,
split_proofs: bool,
disable_selector_compression: bool,
commitment: Option<Commitments>,
},
Aggregate {
aggregation_snarks: Vec<PathBuf>,
pk_path: PathBuf,
proof_path: PathBuf,
srs_path: Option<PathBuf>,
long,
require_equals = true,
num_args = 0..=1,
default_value_t = TranscriptType::default(),
value_enum
)]
transcript: TranscriptType,
logrows: u32,
check_mode: CheckMode,
split_proofs: bool,
commitment: Option<Commitments>,
},
CompileCircuit {
model: PathBuf,
compiled_circuit: PathBuf,
settings_path: PathBuf,
},
Setup {
compiled_circuit: PathBuf,
srs_path: Option<PathBuf>,
vk_path: PathBuf,
pk_path: PathBuf,
witness: Option<PathBuf>,
disable_selector_compression: bool,
},
SetupTestEvmData {
da |
ta: PathBuf,
compiled_circuit: PathBuf,
test_data: PathBuf,
rpc_url: Option<String>,
input_source: TestDataSource,
output_source: TestDataSource,
},
TestUpdateAccountCalls {
addr: H160Flag,
data: PathBuf,
rpc_url: Option<String>,
},
SwapProofCommitments {
proof_path: PathBuf,
witness_path: PathBuf,
},
Prove {
witness: PathBuf,
compiled_circuit: PathBuf,
pk_path: PathBuf,
proof_path: PathBuf,
srs_path: Option<PathBuf>,
long,
require_equals = true,
num_args = 0..=1,
default_value_t = ProofType::Single,
value_enum
)]
proof_type: ProofType,
check_mode: CheckMode,
},
CreateEvmVerifier {
srs_path: Option<PathBuf>,
settings_path: PathBuf,
vk_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
render_vk_seperately: bool,
},
CreateEvmVK {
srs_path: Option<PathBuf>,
settings_path: PathBuf,
vk_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
},
CreateEvmDataAttestation {
settings_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
d |
ata: PathBuf,
},
CreateEvmVerifierAggr {
srs_path: Option<PathBuf>,
vk_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
aggregation_settings: Vec<PathBuf>,
logrows: u32,
render_vk_seperately: bool,
},
Verify {
settings_path: PathBuf,
proof_path: PathBuf,
vk_path: PathBuf,
srs_path: Option<PathBuf>,
reduced_srs: bool,
},
VerifyAggr {
proof_path: PathBuf,
vk_path: PathBuf,
reduced_srs: bool,
srs_path: Option<PathBuf>,
logrows: u32,
commitment: Option<Commitments>,
},
DeployEvmVerifier {
sol_code_path: PathBuf,
rpc_url: Option<String>,
addr_path: PathBuf,
optimizer_runs: usize,
private_key: Option<String>,
},
DeployEvmVK {
sol_code_path: PathBuf,
rpc_url: Option<String>,
addr_path: PathBuf,
optimizer_runs: usize,
private_key: Option<String>,
},
DeployEvmDataAttestation {
data: PathBuf,
settings_path: PathBuf,
sol_code_path: PathBuf,
rpc_url: Option<String>,
addr_path: PathBuf,
optimizer_runs: usize,
private_key: Option<String>,
},
VerifyEvm { |
proof_path: PathBuf,
addr_verifier: H160Flag,
rpc_url: Option<String>,
addr_da: Option<H160Flag>,
addr_vk: Option<H160Flag>,
},
} |
use crate::graph::input::{CallsToAccount, FileSourceInner, GraphData};
use crate::graph::modules::POSEIDON_INSTANCES;
use crate::graph::DataSource;
use crate::graph::GraphSettings;
use crate::pfsys::evm::EvmVerificationError;
use crate::pfsys::Snark;
use ethers::abi::Contract;
use ethers::contract::abigen;
use ethers::contract::ContractFactory;
use ethers::core::k256::ecdsa::SigningKey;
use ethers::middleware::SignerMiddleware;
use ethers::prelude::ContractInstance;
use ethers::prelude::Wallet;
use ethers::providers::Middleware;
use ethers::providers::{Http, Provider};
use ethers::signers::Signer;
use ethers::solc::{CompilerInput, Solc};
use ethers::types::transaction::eip2718::TypedTransaction;
use ethers::types::TransactionRequest;
use ethers::types::H160;
use ethers::types::U256;
use ethers::types::{Bytes, I256};
use ethers::{
prelude::{LocalWallet, Wallet},
utils::{Anvil, AnvilInstance},
};
use halo2_solidity_verifier::encode_calldata;
use halo2curves::bn256::{Fr, G1Affine};
use halo2curves::group::ff::PrimeField;
use log::{debug, info, warn};
use std::error::Error;
use std::path::PathBuf;
use std::time::Duration;
use std::{convert::TryFrom, sync::Arc};
/// Shared client type: an HTTP JSON-RPC provider wrapped with a local signer.
pub type EthersClient = Arc<SignerMiddleware<Provider<Http>, Wallet<SigningKey>>>;
// Generate type-safe contract bindings from the committed ABI files.
abigen!(TestReads, "./abis/TestReads.json");
abigen!(DataAttestation, "./abis/DataAttestation.json");
abigen!(QuantizeData, "./abis/QuantizeData.json");
// Solidity sources, embedded at build time and compiled on demand at runtime.
const TESTREADS_SOL: &str = include_str!("../contracts/TestReads.sol");
const QUANTIZE_DATA_SOL: &str = include_str!("../contracts/QuantizeData.sol");
const ATTESTDATA_SOL: &str = include_str!("../contracts/AttestData.sol");
const LOADINSTANCES_SOL: &str = include_str!("../contracts/LoadInstances.sol");
pub async fn setup_eth_backend(
rpc_url: Option<&str>,
private_key: Option<&str>,
) -> Result<(AnvilInstance, EthersClient), Box<dyn Error>> {
let anvil = Anvil::new()
.args(["--code-size-limit=41943040", "--disable-block-gas-limit"])
.spawn();
let endpoint: String;
if |
let Some(rpc_url) = rpc_url {
endpoint = rpc_url.to_string();
} else {
endpoint = anvil.endpoint();
};
let provider = Provider::<Http>::try_from(endpoint)?.interval(Duration::from_millis(10u64));
let chain_id = provider.get_chainid().await?.as_u64();
info!("using chain {}", chain_id);
let wallet: LocalWallet;
if let Some(private_key) = private_key {
debug!("using private key {}", private_key);
let private_key_format_error =
"Private key must be in hex format, 64 chars, without 0x prefix";
if private_key.len() != 64 {
return Err(private_key_format_error.into());
}
let private_key_buffer = hex::decode(private_key)?;
let signing_key = SigningKey::from_slice(&private_key_buffer)?;
wallet = LocalWallet::from(signing_key);
} else {
wallet = anvil.keys()[0].clone().into();
}
let client = Arc::new(SignerMiddleware::new(
provider,
wallet.with_chain_id(chain_id),
));
Ok((anvil, client))
}
/// Compile `contract_name` from `sol_code_path` (with `runs` optimizer runs)
/// and deploy it with a zero-argument constructor, returning its address.
pub async fn deploy_contract_via_solidity(
    sol_code_path: PathBuf,
    rpc_url: Option<&str>,
    runs: usize,
    private_key: Option<&str>,
    contract_name: &str,
) -> Result<ethers::types::Address, Box<dyn Error>> {
    let (anvil, client) = setup_eth_backend(rpc_url, private_key).await?;
    let (abi, bytecode, runtime_bytecode) =
        get_contract_artifacts(sol_code_path, contract_name, runs)?;
    let factory = get_sol_contract_factory(abi, bytecode, runtime_bytecode, client.clone())?;
    let contract = factory.deploy(())?.send().await?;
    let addr = contract.address();
    // Dropping the Anvil handle tears down the local node.
    drop(anvil);
    Ok(addr)
}
pub async fn deploy_da_verifier_via_solidity(
settings_path: PathBuf,
input: PathBuf,
sol_code_path: PathBuf,
rpc_url: Option<&str>,
runs: usize,
private_key: Option<&str>,
) -> Result<ethers::types::Address, Box<dyn Error>> {
let (anvil, client) = setup_eth_backend(rpc_url, private_key).aw |
ait?;
let input = GraphData::from_path(input)?;
let settings = GraphSettings::load(&settings_path)?;
let mut scales: Vec<u32> = vec![];
let mut calls_to_accounts = vec![];
let mut instance_shapes = vec![];
let mut model_instance_offset = 0;
if settings.run_args.input_visibility.is_hashed() {
instance_shapes.push(POSEIDON_INSTANCES)
} else if settings.run_args.input_visibility.is_public() {
for idx in 0..settings.model_input_scales.len() {
let shape = &settings.model_instance_shapes[idx];
instance_shapes.push(shape.iter().product::<usize>());
model_instance_offset += 1;
}
}
if settings.run_args.param_visibility.is_hashed() {
return Err(Box::new(EvmVerificationError::InvalidVisibility));
}
if settings.run_args.output_visibility.is_hashed() {
instance_shapes.push(POSEIDON_INSTANCES)
} else if settings.run_args.output_visibility.is_public() {
for idx in model_instance_offset..model_instance_offset + settings.model_output_scales.len()
{
let shape = &settings.model_instance_shapes[idx];
instance_shapes.push(shape.iter().product::<usize>());
}
}
let mut instance_idx = 0;
let mut contract_instance_offset = 0;
if let DataSource::OnChain(source) = input.input_data {
if settings.run_args.input_visibility.is_hashed_public() {
scales.extend(vec![0; instance_shapes[instance_idx]]);
instance_idx += 1;
} else {
let input_scales = settings.model_input_scales;
for scale in input_scales {
scales.extend(vec![scale as u32; instance_shapes[instance_idx]]);
instance_idx += 1;
}
}
for call in source.calls {
calls_to_accounts.push(call);
}
} else if let DataSource::File(source) = input.input_data {
if settings.run_args.input_visibility.is_public() { |
instance_idx += source.len();
for s in source {
contract_instance_offset += s.len();
}
}
}
if let Some(DataSource::OnChain(source)) = input.output_data {
if settings.run_args.output_visibility.is_hashed_public() {
scales.extend(vec![0; instance_shapes[instance_idx]]);
} else {
let input_scales = settings.model_output_scales;
for scale in input_scales {
scales.extend(vec![scale as u32; instance_shapes[instance_idx]]);
instance_idx += 1;
}
}
for call in source.calls {
calls_to_accounts.push(call);
}
}
let (contract_addresses, call_data, decimals) = if !calls_to_accounts.is_empty() {
parse_calls_to_accounts(calls_to_accounts)?
} else {
return Err("Data source for either input_data or output_data must be OnChain".into());
};
let (abi, bytecode, runtime_bytecode) =
get_contract_artifacts(sol_code_path, "DataAttestation", runs)?;
let factory = get_sol_contract_factory(abi, bytecode, runtime_bytecode, client.clone())?;
info!("call_data: {:
info!("contract_addresses: {:
info!("decimals: {:
let contract = factory
.deploy((
contract_addresses,
call_data,
decimals,
scales,
contract_instance_offset as u32,
client.address(),
))?
.send()
.await?;
drop(anvil);
Ok(contract.address())
}
type ParsedCallsToAccount = (Vec<H160>, Vec<Vec<Bytes>>, Vec<Vec<U256>>);
fn parse_calls_to_accounts(
calls_to_accounts: Vec<CallsToAccount>,
) -> Result<ParsedCallsToAccount, Box<dyn Error>> {
let mut contract_addresses = vec![];
let mut call_data = vec![];
let mut decimals: Vec<Vec<U256>> = vec![];
for (i, val) in calls_to_accounts.iter().enumerate() {
let contract_address_bytes = hex::decode(val.address.clone())?; |
let contract_address = H160::from_slice(&contract_address_bytes);
contract_addresses.push(contract_address);
call_data.push(vec![]);
decimals.push(vec![]);
for (call, decimal) in &val.call_data {
let call_data_bytes = hex::decode(call)?;
call_data[i].push(ethers::types::Bytes::from(call_data_bytes));
decimals[i].push(ethers::types::U256::from_dec_str(&decimal.to_string())?);
}
}
Ok((contract_addresses, call_data, decimals))
}
/// Push fresh account-call data to a deployed DataAttestation contract, then
/// confirm that a non-admin signer is rejected when attempting the same call.
// NOTE(review): the "failed as expected" log string was split across lines in
// this copy of the source; rejoined here.
pub async fn update_account_calls(
    addr: H160,
    input: PathBuf,
    rpc_url: Option<&str>,
) -> Result<(), Box<dyn Error>> {
    let input = GraphData::from_path(input)?;
    let mut calls_to_accounts = vec![];
    if let DataSource::OnChain(source) = input.input_data {
        for call in source.calls {
            calls_to_accounts.push(call);
        }
    }
    if let Some(DataSource::OnChain(source)) = input.output_data {
        for call in source.calls {
            calls_to_accounts.push(call);
        }
    }
    let (contract_addresses, call_data, decimals) = if !calls_to_accounts.is_empty() {
        parse_calls_to_accounts(calls_to_accounts)?
    } else {
        return Err("Data source for either input_data or output_data must be OnChain".into());
    };
    let (anvil, client) = setup_eth_backend(rpc_url, None).await?;
    let contract = DataAttestation::new(addr, client.clone());
    contract
        .update_account_calls(
            contract_addresses.clone(),
            call_data.clone(),
            decimals.clone(),
        )
        .send()
        .await?;
    // Re-sign with a different (non-admin) key: the update must now fail.
    let wallet: LocalWallet = anvil.keys()[1].clone().into();
    let client = Arc::new(client.with_signer(wallet.with_chain_id(anvil.chain_id())));
    let contract = DataAttestation::new(addr, client.clone());
    if (contract
        .update_account_calls(contract_addresses, call_data, decimals)
        .send()
        .await)
        .is_err()
    {
        info!("update_account_calls failed as expected");
    } else {
        return Err("update_account_calls should have failed".into());
    }
    Ok(())
}
pub async fn verify_proof_via_solidity(
proof: Snark<Fr, G1Affine>,
addr: ethers::types::Address,
addr_vk: Option<H160>,
rpc_url: Option<&str>,
) -> Result<bool, Box<dyn Error>> {
let flattened_instances = proof.instances.into_iter().flatten();
let encoded = encode_calldata(
addr_vk.as_ref().map(|x| x.0),
&proof.proof,
&flattened_instances.collect::<Vec<_>>(),
);
info!("encoded: {:
let (anvil, client) = setup_eth_backend(rpc_url, None).await?;
let tx: TypedTransaction = TransactionRequest::default()
.to(addr)
.from(client.address())
.data(encoded)
.into();
debug!("transaction {:
let result = client.call(&tx, None).await;
if result.is_err() {
return Err(Box::new(EvmVerificationError::SolidityExecution));
}
let result = result?;
info!("result: {:
let result = result.to_vec().last().ok_or("no contract output")? == &1u8;
if !result {
return Err(Box::new(EvmVerificationError::InvalidProof));
}
let gas = client.estimate_gas(&tx, None).await?;
info!("estimated verify gas cost: {:
if gas > 30_000_000.into() {
warn!(
"Gas cost of verify transaction is greater than 30 million block gas limit. It will fail on mainnet."
);
} else if gas > 15_000_000.into() {
warn!(
"Gas cost of verify transaction is greater than 15 million, the target block size for ethereum"
);
}
drop(anvil);
Ok(true)
}
/// Number of digits after the decimal point in `num`'s `Display` string
/// (0 when it renders as an integer).
fn count_decimal_places(num: f32) -> usize {
    let text = num.to_string();
    text.find('.').map_or(0, |dot| text.len() - dot - 1)
}
pub async fn setup_test_contract<M: 'static + Middleware>(
client: Arc<M>,
data: &[Vec<FileSourceInner>],
) -> Result<(ContractInstance<Arc<M>, M>, Ve |
c<u8>), Box<dyn Error>> {
let mut sol_path = std::env::temp_dir();
sol_path.push("testreads.sol");
std::fs::write(&sol_path, TESTREADS_SOL)?;
let (abi, bytecode, runtime_bytecode) = get_contract_artifacts(sol_path, "TestReads", 0)?;
let factory = get_sol_contract_factory(abi, bytecode, runtime_bytecode, client.clone())?;
let mut decimals = vec![];
let mut scaled_by_decimals_data = vec![];
for input in &data[0] {
if input.is_float() {
let input = input.to_float() as f32;
let decimal_places = count_decimal_places(input) as u8;
let scaled_by_decimals = input * f32::powf(10., decimal_places.into());
scaled_by_decimals_data.push(I256::from(scaled_by_decimals as i128));
decimals.push(decimal_places);
} else if input.is_field() {
let input = input.to_field(0);
let hex_str_fr = format!("{:?}", input);
scaled_by_decimals_data.push(I256::from_raw(U256::from_str_radix(&hex_str_fr, 16)?));
decimals.push(0);
}
}
let contract = factory.deploy(scaled_by_decimals_data)?.send().await?;
Ok((contract, decimals))
}
pub async fn verify_proof_with_data_attestation(
proof: Snark<Fr, G1Affine>,
addr_verifier: ethers::types::Address,
addr_da: ethers::types::Address,
addr_vk: Option<H160>,
rpc_url: Option<&str>,
) -> Result<bool, Box<dyn Error>> {
use ethers::abi::{Function, Param, ParamType, StateMutability, Token};
let mut public_inputs: Vec<U256> = vec![];
let flattened_instances = proof.instances.into_iter().flatten();
for val in flattened_instances.clone() {
let bytes = val.to_repr();
let u = U256::from_little_endian(bytes.as_slice());
public_inputs.push(u);
}
let encoded_verifier = encode_calldata(
addr_vk.as_ref().map(|x| x.0),
&proof.proof,
&flattened_instances.collect::<Vec<_>>(),
);
info!("encoded: {:
info!("public_inputs: {:
i |
nfo!(
"proof: {:
ethers::types::Bytes::from(proof.proof.to_vec())
);
let func = Function {
name: "verifyWithDataAttestation".to_owned(),
inputs: vec![
Param {
name: "verifier".to_owned(),
kind: ParamType::Address,
internal_type: None,
},
Param {
name: "encoded".to_owned(),
kind: ParamType::Bytes,
internal_type: None,
},
],
outputs: vec![Param {
name: "success".to_owned(),
kind: ParamType::Bool,
internal_type: None,
}],
constant: None,
state_mutability: StateMutability::View,
};
let encoded = func.encode_input(&[
Token::Address(addr_verifier),
Token::Bytes(encoded_verifier),
])?;
info!("encoded: {:
let (anvil, client) = setup_eth_backend(rpc_url, None).await?;
let tx: TypedTransaction = TransactionRequest::default()
.to(addr_da)
.from(client.address())
.data(encoded)
.into();
debug!("transaction {:
info!(
"estimated verify gas cost: {:
client.estimate_gas(&tx, None).await?
);
let result = client.call(&tx, None).await;
if result.is_err() {
return Err(Box::new(EvmVerificationError::SolidityExecution));
}
let result = result?;
info!("result: {:
let result = result.to_vec().last().ok_or("no contract output")? == &1u8;
if !result {
return Err(Box::new(EvmVerificationError::InvalidProof));
}
drop(anvil);
Ok(true)
}
pub fn get_provider(rpc_url: &str) -> Result<Provider<Http>, Box<dyn Error>> {
let provider = Provider::<Http>::try_from(rpc_url)?;
debug!("{:
Ok(provider)
}
pub async fn test_on_chain_data<M: 'static + Middleware>(
client: Arc<M>,
data: &[Vec<FileSourceInner>],
) -> Result<Vec<CallsToAccount>, Box<dyn Error>> {
let (contract, decimals) = setup_test_contract |
(client.clone(), data).await?;
let contract = TestReads::new(contract.address(), client.clone());
let mut calldata = vec![];
for (i, _) in data.iter().flatten().enumerate() {
let function = contract.method::<_, I256>("arr", i as u32)?;
let call = function.calldata().ok_or("could not get calldata")?;
calldata.push((hex::encode(call), decimals[i]));
}
let calls_to_account = CallsToAccount {
call_data: calldata,
address: hex::encode(contract.address().as_bytes()),
};
info!("calls_to_account: {:
Ok(vec![calls_to_account])
}
pub async fn read_on_chain_inputs<M: 'static + Middleware>(
client: Arc<M>,
address: H160,
data: &Vec<CallsToAccount>,
) -> Result<(Vec<Bytes>, Vec<u8>), Box<dyn Error>> {
let mut fetched_inputs = vec![];
let mut decimals = vec![];
for on_chain_data in data {
let contract_address_bytes = hex::decode(on_chain_data.address.clone())?;
let contract_address = H160::from_slice(&contract_address_bytes);
for (call_data, decimal) in &on_chain_data.call_data {
let call_data_bytes = hex::decode(call_data.clone())?;
let tx: TypedTransaction = TransactionRequest::default()
.to(contract_address)
.from(address)
.data(call_data_bytes)
.into();
debug!("transaction {:
let result = client.call(&tx, None).await?;
debug!("return data {:
fetched_inputs.push(result);
decimals.push(*decimal);
}
}
Ok((fetched_inputs, decimals))
}
pub async fn evm_quantize<M: 'static + Middleware>(
client: Arc<M>,
scales: Vec<crate::Scale>,
data: &(Vec<ethers::types::Bytes>, Vec<u8>),
) -> Result<Vec<Fr>, Box<dyn Error>> {
let mut sol_path = std::env::temp_dir();
sol_path.push("quantizedata.sol");
std::fs::write(&sol_path, QUANTIZE_DATA_SOL)?;
let (abi, bytecode, runtime_bytecode) = get |
_contract_artifacts(sol_path, "QuantizeData", 0)?;
let factory = get_sol_contract_factory(abi, bytecode, runtime_bytecode, client.clone())?;
let contract = factory.deploy(())?.send().await?;
let contract = QuantizeData::new(contract.address(), client.clone());
let fetched_inputs = data.0.clone();
let decimals = data.1.clone();
let fetched_inputs = fetched_inputs
.iter()
.map(|x| Result::<_, std::convert::Infallible>::Ok(ethers::types::Bytes::from(x.to_vec())))
.collect::<Result<Vec<Bytes>, _>>()?;
let decimals = decimals
.iter()
.map(|x| U256::from_dec_str(&x.to_string()))
.collect::<Result<Vec<U256>, _>>()?;
let scales = scales
.iter()
.map(|x| U256::from_dec_str(&x.to_string()))
.collect::<Result<Vec<U256>, _>>()?;
info!("scales: {:
info!("decimals: {:
info!("fetched_inputs: {:
let results = contract
.quantize_data(fetched_inputs, decimals, scales)
.call()
.await?;
let felts = contract.to_field_element(results.clone()).call().await?;
info!("evm quantization contract results: {:
let results = felts
.iter()
.map(|x| PrimeField::from_str_vartime(&x.to_string()).unwrap())
.collect::<Vec<Fr>>();
info!("evm quantization results: {:
Ok(results.to_vec())
}
fn get_sol_contract_factory<M: 'static + Middleware>(
abi: Contract,
bytecode: Bytes,
runtime_bytecode: Bytes,
client: Arc<M>,
) -> Result<ContractFactory<M>, Box<dyn Error>> {
const MAX_RUNTIME_BYTECODE_SIZE: usize = 24577;
let size = runtime_bytecode.len();
debug!("runtime bytecode size: {:
if size > MAX_RUNTIME_BYTECODE_SIZE {
warn!(
"Solidity runtime bytecode size is: {:
which exceeds 24577 bytes spurious dragon limit.
Contract will fail to deploy on any chain with
EIP 140 enabled",
size
);
}
Ok(ContractFactory::new(abi, bytecode, client))
} |
/// Compile `contract_name` from `sol_code_path` with the system `solc`,
/// returning its (ABI, creation bytecode, runtime bytecode).
///
/// `runs > 0` enables the optimizer with that many runs.
pub fn get_contract_artifacts(
    sol_code_path: PathBuf,
    contract_name: &str,
    runs: usize,
) -> Result<(Contract, Bytes, Bytes), Box<dyn Error>> {
    if !sol_code_path.exists() {
        return Err("sol_code_path does not exist".into());
    }
    // Take the first compiler input by value: avoids the original's
    // panicking `[0]` index and the clone it required.
    let first = CompilerInput::new(sol_code_path)?
        .into_iter()
        .next()
        .ok_or("could not generate compiler input")?;
    let input: CompilerInput = if runs > 0 {
        let mut i = first.optimizer(runs);
        i.settings.optimizer.enable();
        i
    } else {
        first
    };
    let compiled = Solc::default().compile(&input)?;
    let (abi, bytecode, runtime_bytecode) = match compiled.find(contract_name) {
        Some(c) => c.into_parts_or_default(),
        None => {
            return Err("could not find contract".into());
        }
    };
    Ok((abi, bytecode, runtime_bytecode))
}
pub fn fix_da_sol(
input_data: Option<Vec<CallsToAccount>>,
output_data: Option<Vec<CallsToAccount>>,
) -> Result<String, Box<dyn Error>> {
let mut accounts_len = 0;
let mut contract = ATTESTDATA_SOL.to_string();
let load_instances = LOADINSTANCES_SOL.to_string();
contract = contract.replace(
" |
import './LoadInstances.sol';",
&load_instances[load_instances
.find("contract")
.ok_or("could not get load-instances contract")?..],
);
if let Some(input_data) = input_data {
let input_calls: usize = input_data.iter().map(|v| v.call_data.len()).sum();
accounts_len = input_data.len();
contract = contract.replace(
"uint256 constant INPUT_CALLS = 0;",
&format!("uint256 constant INPUT_CALLS = {};", input_calls),
);
}
if let Some(output_data) = output_data {
let output_calls: usize = output_data.iter().map(|v| v.call_data.len()).sum();
accounts_len += output_data.len();
contract = contract.replace(
"uint256 constant OUTPUT_CALLS = 0;",
&format!("uint256 constant OUTPUT_CALLS = {};", output_calls),
);
}
contract = contract.replace("AccountCall[]", &format!("AccountCall[{}]", accounts_len));
Ok(contract)
} |
use crate::circuit::CheckMode;
use crate::commands::CalibrationTarget;
use crate::commands::Commands;
use crate::commands::H160Flag;
use crate::eth::{deploy_contract_via_solidity, deploy_da_verifier_via_solidity};
use crate::eth::{fix_da_sol, get_contract_artifacts, verify_proof_via_solidity};
use crate::graph::input::GraphData;
use crate::graph::{GraphCircuit, GraphSettings, GraphWitness, Model};
use crate::graph::{TestDataSource, TestSources};
use crate::pfsys::evm::aggregation_kzg::{AggregationCircuit, PoseidonTranscript};
use crate::pfsys::{
create_keys, load_pk, load_vk, save_params, save_pk, Snark, StrategyType, TranscriptType,
};
use crate::pfsys::{
create_proof_circuit, swap_proof_commitments_polycommit, verify_proof_circuit, ProofSplitCommit,
};
use crate::pfsys::{save_vk, srs::*};
use crate::tensor::TensorError;
use crate::{Commitments, RunArgs};
use colored::Colorize;
use gag::Gag;
use halo2_proofs::dev::VerifyFailure;
use halo2_proofs::plonk::{self, Circuit};
use halo2_proofs::poly::commitment::{CommitmentScheme, Params};
use halo2_proofs::poly::commitment::{ParamsProver, Verifier};
use halo2_proofs::poly::ipa::commitment::{IPACommitmentScheme, ParamsIPA};
use halo2_proofs::poly::ipa::multiopen::{ProverIPA, VerifierIPA};
use halo2_proofs::poly::ipa::strategy::AccumulatorStrategy as IPAAccumulatorStrategy;
use halo2_proofs::poly::ipa::strategy::SingleStrategy as IPASingleStrategy;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
use halo2_proofs::poly::kzg::multiopen::{ProverSHPLONK, VerifierSHPLONK};
use halo2_proofs::poly::kzg::strategy::AccumulatorStrategy as KZGAccumulatorStrategy;
use halo2_proofs::poly::kzg::{
commitment::ParamsKZG, strategy::SingleStrategy as KZGSingleStrategy,
};
use halo2_proofs::poly::VerificationStrategy;
use halo2_proofs::transcript::{EncodedChallenge, TranscriptReadBuffer};
use halo2_solidity_verifier;
use halo2curves::bn256::{Bn256, Fr, G1Affine};
use halo2curves::ff::{FromUniformBytes, WithSmallOrderMulGroup};
use halo2curves::serde::Se |
rdeObject;
use indicatif::{ProgressBar, ProgressStyle};
use instant::Instant;
use itertools::Itertools;
use log::debug;
use log::{info, trace, warn};
use serde::de::DeserializeOwned;
use serde::Serialize;
use snark_verifier::loader::native::NativeLoader;
use snark_verifier::system::halo2::compile;
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use snark_verifier::system::halo2::Config;
use std::error::Error;
use std::fs::File;
use std::io::{Cursor, Write};
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use std::sync::OnceLock;
use crate::EZKL_BUF_CAPACITY;
use std::io::BufWriter;
use std::time::Duration;
use tabled::Tabled;
use thiserror::Error;
static _SOLC_REQUIREMENT: OnceLock<bool> = OnceLock::new(); |
fn check_solc_requirement() {
info!("checking solc installation..");
_SOLC_REQUIREMENT.get_or_init(|| match Command::new("solc").arg("--version").output() {
Ok(output) => {
debug!("solc output: {:
debug!("solc output success: {:
if !output.status.success() {
log::error!(
"`solc` check failed: {}",
String::from_utf8_lossy(&output.stderr)
);
return false;
}
debug!("solc check passed, proceeding");
true
}
Err(_) => {
log::error!("`solc` check failed: solc not found");
false
}
});
}
use lazy_static::lazy_static;
lazy_static! {
    /// Root of the local ezkl artifact repository; `EZKL_REPO_PATH` overrides
    /// the default `$HOME/.ezkl`.
    // NOTE(review): panics when neither EZKL_REPO_PATH nor HOME is set —
    // confirm that is acceptable for all supported platforms.
    pub static ref EZKL_REPO_PATH: String =
        std::env::var("EZKL_REPO_PATH").unwrap_or_else(|_|
            format!("{}/.ezkl", std::env::var("HOME").unwrap())
        );
    /// Directory where downloaded/generated SRS files are cached.
    pub static ref EZKL_SRS_REPO_PATH: String = format!("{}/srs", *EZKL_REPO_PATH);
}
/// Errors produced while executing a CLI command.
/// NOTE(review): derive/error attributes appear to have been stripped from
/// this copy of the source — confirm against upstream.
pub enum ExecutionError {
    /// Mock verification failed with the collected constraint failures.
    VerifyError(Vec<VerifyFailure>),
}
lazy_static::lazy_static! {
    /// Working directory for all commands; `EZKL_WORKING_DIR` overrides the
    /// process's current directory (".").
    static ref WORKING_DIR: PathBuf = {
        let wd = std::env::var("EZKL_WORKING_DIR").unwrap_or_else(|_| ".".to_string());
        PathBuf::from(wd)
    };
}
pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
std::env::set_current_dir(WORKING_DIR.as_path())?;
match command {
Commands::Empty => Ok(String::new()),
Commands::GenSrs {
srs_path,
logrows,
commitment,
} => gen_srs_cmd(srs_path, logrows as u32, commitment),
Commands::GetSrs {
srs_path,
settings_path,
logrows,
commitment,
} => get_srs_cmd(srs_path, settings_path, logrows, commitment).await,
Commands::Table { model, args } => table(model, args),
Commands::GenSettings {
model,
settings_path, |
args,
} => gen_circuit_settings(model, settings_path, args),
Commands::CalibrateSettings {
model,
settings_path,
data,
target,
lookup_safety_margin,
scales,
scale_rebase_multiplier,
max_logrows,
only_range_check_rebase,
} => calibrate(
model,
data,
settings_path,
target,
lookup_safety_margin,
scales,
scale_rebase_multiplier,
only_range_check_rebase,
max_logrows,
)
.map(|e| serde_json::to_string(&e).unwrap()),
Commands::GenWitness {
data,
compiled_circuit,
output,
vk_path,
srs_path,
} => gen_witness(compiled_circuit, data, Some(output), vk_path, srs_path)
.map(|e| serde_json::to_string(&e).unwrap()),
Commands::Mock { model, witness } => mock(model, witness),
Commands::CreateEvmVerifier {
vk_path,
srs_path,
settings_path,
sol_code_path,
abi_path,
render_vk_seperately,
} => create_evm_verifier(
vk_path,
srs_path,
settings_path,
sol_code_path,
abi_path,
render_vk_seperately,
),
Commands::CreateEvmVK {
vk_path,
srs_path,
settings_path,
sol_code_path,
abi_path,
} => create_evm_vk(vk_path, srs_path, settings_path, sol_code_path, abi_path),
Commands::CreateEvmDataAttestation {
settings_path,
sol_code_path,
abi_path,
data,
} => create_evm_data_attestation(settings_path, sol_code_path, abi_path, data),
Commands::CreateEvmVerifierAggr {
vk_path,
srs_path,
sol_code_path,
abi_pat |
h,
aggregation_settings,
logrows,
render_vk_seperately,
} => create_evm_aggregate_verifier(
vk_path,
srs_path,
sol_code_path,
abi_path,
aggregation_settings,
logrows,
render_vk_seperately,
),
Commands::CompileCircuit {
model,
compiled_circuit,
settings_path,
} => compile_circuit(model, compiled_circuit, settings_path),
Commands::Setup {
compiled_circuit,
srs_path,
vk_path,
pk_path,
witness,
disable_selector_compression,
} => setup(
compiled_circuit,
srs_path,
vk_path,
pk_path,
witness,
disable_selector_compression,
),
Commands::SetupTestEvmData {
data,
compiled_circuit,
test_data,
rpc_url,
input_source,
output_source,
} => {
setup_test_evm_witness(
data,
compiled_circuit,
test_data,
rpc_url,
input_source,
output_source,
)
.await
}
Commands::TestUpdateAccountCalls {
addr,
data,
rpc_url,
} => test_update_account_calls(addr, data, rpc_url).await,
Commands::SwapProofCommitments {
proof_path,
witness_path,
} => swap_proof_commitments_cmd(proof_path, witness_path)
.map(|e| serde_json::to_string(&e).unwrap()),
Commands::Prove {
witness,
compiled_circuit,
pk_path,
proof_path,
srs_path,
proof_type,
check_mode,
} => prove(
witness,
compiled_circuit,
pk_path,
Some(proo |
f_path),
srs_path,
proof_type,
check_mode,
)
.map(|e| serde_json::to_string(&e).unwrap()),
Commands::MockAggregate {
aggregation_snarks,
logrows,
split_proofs,
} => mock_aggregate(aggregation_snarks, logrows, split_proofs),
Commands::SetupAggregate {
sample_snarks,
vk_path,
pk_path,
srs_path,
logrows,
split_proofs,
disable_selector_compression,
commitment,
} => setup_aggregate(
sample_snarks,
vk_path,
pk_path,
srs_path,
logrows,
split_proofs,
disable_selector_compression,
commitment.into(),
),
Commands::Aggregate {
proof_path,
aggregation_snarks,
pk_path,
srs_path,
transcript,
logrows,
check_mode,
split_proofs,
commitment,
} => aggregate(
proof_path,
aggregation_snarks,
pk_path,
srs_path,
transcript,
logrows,
check_mode,
split_proofs,
commitment.into(),
)
.map(|e| serde_json::to_string(&e).unwrap()),
Commands::Verify {
proof_path,
settings_path,
vk_path,
srs_path,
reduced_srs,
} => verify(proof_path, settings_path, vk_path, srs_path, reduced_srs)
.map(|e| serde_json::to_string(&e).unwrap()),
Commands::VerifyAggr {
proof_path,
vk_path,
srs_path,
reduced_srs,
logrows,
commitment,
} => verify_aggr(
proof_path,
vk_path,
srs_path,
logrows,
reduced_srs,
commitment.into(),
)
.map(|e| serde_json::to_strin |
g(&e).unwrap()),
Commands::DeployEvmVerifier {
sol_code_path,
rpc_url,
addr_path,
optimizer_runs,
private_key,
} => {
deploy_evm(
sol_code_path,
rpc_url,
addr_path,
optimizer_runs,
private_key,
"Halo2Verifier",
)
.await
}
Commands::DeployEvmVK {
sol_code_path,
rpc_url,
addr_path,
optimizer_runs,
private_key,
} => {
deploy_evm(
sol_code_path,
rpc_url,
addr_path,
optimizer_runs,
private_key,
"Halo2VerifyingKey",
)
.await
}
Commands::DeployEvmDataAttestation {
data,
settings_path,
sol_code_path,
rpc_url,
addr_path,
optimizer_runs,
private_key,
} => {
deploy_da_evm(
data,
settings_path,
sol_code_path,
rpc_url,
addr_path,
optimizer_runs,
private_key,
)
.await
}
Commands::VerifyEvm {
proof_path,
addr_verifier,
rpc_url,
addr_da,
addr_vk,
} => verify_evm(proof_path, addr_verifier, rpc_url, addr_da, addr_vk).await,
}
}
/// Resolve the SRS file path. An explicit `srs_path` wins; otherwise the file
/// lives in the shared SRS repo, named by commitment scheme and `logrows`
/// (the repo directory is created on first use).
// NOTE(review): `logrows` was split mid-token in this copy of the source;
// rejoined here.
pub fn get_srs_path(logrows: u32, srs_path: Option<PathBuf>, commitment: Commitments) -> PathBuf {
    if let Some(srs_path) = srs_path {
        return srs_path;
    }
    if !Path::new(&*EZKL_SRS_REPO_PATH).exists() {
        std::fs::create_dir_all(&*EZKL_SRS_REPO_PATH).unwrap();
    }
    match commitment {
        Commitments::KZG => Path::new(&*EZKL_SRS_REPO_PATH).join(format!("kzg{}.srs", logrows)),
        Commitments::IPA => Path::new(&*EZKL_SRS_REPO_PATH).join(format!("ipa{}.srs", logrows)),
    }
}
/// Whether an SRS file for the given parameters is already present on disk.
fn srs_exists_check(logrows: u32, srs_path: Option<PathBuf>, commitment: Commitments) -> bool {
    // `get_srs_path` already returns a PathBuf, so no re-wrapping is needed
    get_srs_path(logrows, srs_path, commitment).exists()
}
/// Generate a fresh structured reference string of size `2^logrows` for the
/// requested commitment scheme and persist it to `srs_path`.
///
/// Returns an empty string on success (the CLI convention for commands with
/// no textual output).
pub(crate) fn gen_srs_cmd(
    srs_path: PathBuf,
    logrows: u32,
    commitment: Commitments,
) -> Result<String, Box<dyn Error>> {
    match commitment {
        Commitments::KZG => {
            let srs_params = gen_srs::<KZGCommitmentScheme<Bn256>>(logrows);
            save_params::<KZGCommitmentScheme<Bn256>>(&srs_path, &srs_params)?;
        }
        Commitments::IPA => {
            let srs_params = gen_srs::<IPACommitmentScheme<G1Affine>>(logrows);
            save_params::<IPACommitmentScheme<G1Affine>>(&srs_path, &srs_params)?;
        }
    }
    Ok(String::new())
}
async fn fetch_srs(uri: &str) -> Result<Vec<u8>, Box<dyn Error>> {
let pb = {
let pb = init_spinner();
pb.set_message("Downloading SRS (this may take a while) ...");
pb
};
let client = reqwest::Client::new();
let mut resp = client.get(uri).body(vec![]).send().await?;
let mut buf = vec![];
while let Some(chunk) = resp.chunk().await? {
buf.extend(chunk.to_vec());
}
pb.finish_with_message("SRS downloaded.");
Ok(std::mem::take(&mut buf))
}
/// Compute the SHA-256 hex digest of the file at `path`.
///
/// # Errors
/// Returns an I/O error if the file cannot be read.
pub(crate) fn get_file_hash(path: &PathBuf) -> Result<String, Box<dyn Error>> {
    // `fs::read` sizes its buffer from file metadata, replacing the manual
    // BufReader + read_to_end sequence with a single call.
    let buffer = std::fs::read(path)?;
    info!(
        "read {} bytes from file (vector of len = {})",
        buffer.len(),
        buffer.len()
    );
    let hash = sha256::digest(buffer);
    info!("file hash: {}", hash);
    Ok(hash)
}
/// Verify that the SRS on disk matches the published SHA-256 hash for this
/// `logrows`. On mismatch the file is removed from disk and an error is
/// returned; on success the computed hash is returned.
fn check_srs_hash(
    logrows: u32,
    srs_path: Option<PathBuf>,
    commitment: Commitments,
) -> Result<String, Box<dyn Error>> {
    let path = get_srs_path(logrows, srs_path, commitment);
    let hash = get_file_hash(&path)?;
    let predefined_hash = crate::srs_sha::PUBLIC_SRS_SHA256_HASHES
        .get(&logrows)
        .ok_or_else(|| format!("SRS (k={}) hash not found in public set", logrows))?;
    if hash == *predefined_hash {
        Ok(hash)
    } else {
        // a corrupted or tampered file must not be reused on the next run
        warn!("removing SRS file at {}", path.display());
        std::fs::remove_file(path)?;
        Err(
            "SRS hash does not match the expected hash. Remote SRS may have been tampered with."
                .into(),
        )
    }
}
/// Ensure an SRS of size `2^k` is available locally: download one for KZG
/// (then hash-check it), or generate one locally for IPA.
///
/// `logrows` and `commitment` are taken from the explicit arguments when
/// given, otherwise from the settings file at `settings_path`.
pub(crate) async fn get_srs_cmd(
    srs_path: Option<PathBuf>,
    settings_path: Option<PathBuf>,
    logrows: Option<u32>,
    commitment: Option<Commitments>,
) -> Result<String, Box<dyn Error>> {
    let err_string = "You will need to provide a valid settings file to use the settings option. You should run gen-settings to generate a settings file (and calibrate-settings to pick optimal logrows).";
    // resolve logrows: explicit flag wins, then fall back to the settings file
    let k = if let Some(k) = logrows {
        k
    } else if let Some(settings_p) = &settings_path {
        if settings_p.exists() {
            let settings = GraphSettings::load(settings_p)?;
            settings.run_args.logrows
        } else {
            return Err(err_string.into());
        }
    } else {
        return Err(err_string.into());
    };
    // resolve the commitment scheme the same way
    let commitment = if let Some(c) = commitment {
        c
    } else if let Some(settings_p) = settings_path {
        if settings_p.exists() {
            let settings = GraphSettings::load(&settings_p)?;
            settings.run_args.commitment.into()
        } else {
            return Err(err_string.into());
        }
    } else {
        return Err(err_string.into());
    };
    if !srs_exists_check(k, srs_path.clone(), commitment) {
        if matches!(commitment, Commitments::KZG) {
            // KZG SRS files are fetched from the public URL keyed by k
            info!("SRS does not exist, downloading...");
            let srs_uri = format!("{}{}", PUBLIC_SRS_URL, k);
            let mut reader = Cursor::new(fetch_srs(&srs_uri).await?);
            let pb = init_spinner();
            pb.set_message("Validating SRS (this may take a while) ...");
            // parsing the params doubles as a structural sanity check of the download
            let params = ParamsKZG::<Bn256>::read(&mut reader)?;
            pb.finish_with_message("SRS validated.");
            info!("Saving SRS to disk...");
            let mut file = std::fs::File::create(get_srs_path(k, srs_path.clone(), commitment))?;
            let mut buffer = BufWriter::with_capacity(*EZKL_BUF_CAPACITY, &mut file);
            params.write(&mut buffer)?;
            info!("Saved SRS to disk.");
            info!("SRS downloaded");
        } else {
            // IPA SRS can be generated locally — no download needed
            let path = get_srs_path(k, srs_path.clone(), commitment);
            gen_srs_cmd(path, k, commitment)?;
        }
    } else {
        info!("SRS already exists at that path");
    };
    // only KZG files have published reference hashes to check against
    if matches!(commitment, Commitments::KZG) {
        check_srs_hash(k, srs_path.clone(), commitment)?;
    }
    Ok(String::new())
}
/// Load the model and log a human-readable table of its nodes.
pub(crate) fn table(model: PathBuf, run_args: RunArgs) -> Result<String, Box<dyn Error>> {
    let loaded_model = Model::from_run_args(&run_args, &model)?;
    info!("\n {}", loaded_model.table_nodes());
    Ok(String::new())
}
pub(crate) fn gen_witness(
compiled_circuit_path: PathBuf,
data: PathBuf,
output: Option<PathBuf>,
vk_path: Option<PathBuf>,
srs_path: Option<PathBuf>,
) -> Result<GraphWitness, Box<dyn Error>> {
let mut circuit = GraphCircuit::load(compiled_circuit_path)?;
let data = GraphData::from_path(data)?;
let settings = circuit.settings().clone();
let vk = if let Some(vk) = vk_path {
Some(load_vk::<KZGCommitmentScheme<Bn256>, GraphCircuit>(
vk,
settings.clone(),
)?)
} else {
None
};
let mut input = circuit.load_graph_input(&data)?;
let mut input = circuit.load_graph_input(&data)?;
let commitment: Commitments = settings.run_args.commitment.into();
let start_time = Instant::now();
let witness = |
if settings.module_requires_polycommit() {
if get_srs_path(settings.run_args.logrows, srs_path.clone(), commitment).exists() {
match Commitments::from(settings.run_args.commitment) {
Commitments::KZG => {
let srs: ParamsKZG<Bn256> = load_params_prover::<KZGCommitmentScheme<Bn256>>(
srs_path.clone(),
settings.run_args.logrows,
commitment,
)?;
circuit.forward::<KZGCommitmentScheme<_>>(
&mut input,
vk.as_ref(),
Some(&srs),
true,
)?
}
Commitments::IPA => {
let srs: ParamsIPA<G1Affine> =
load_params_prover::<IPACommitmentScheme<G1Affine>>(
srs_path.clone(),
settings.run_args.logrows,
commitment,
)?;
circuit.forward::<IPACommitmentScheme<_>>(
&mut input,
vk.as_ref(),
Some(&srs),
true,
)?
}
}
} else {
warn!("SRS for poly commit does not exist (will be ignored)");
circuit.forward::<KZGCommitmentScheme<Bn256>>(&mut input, vk.as_ref(), None, true)?
}
} else {
circuit.forward::<KZGCommitmentScheme<Bn256>>(&mut input, vk.as_ref(), None, true)?
};
trace!(
"witness generation {:?} took {:?}",
circuit
.settings()
.run_args
.variables
.iter()
.map(|v| { format!("{}={}", v.0, v.1) })
.collect::<Vec<_>>(),
start_time.elapsed()
);
if let Some(output_path) = output {
witness.save(output_path)?;
}
debug!("witness: \n {}", wi |
tness.as_json()?.to_colored_json_auto()?);
Ok(witness)
}
/// Build a circuit from the model and run args, then write its settings to
/// `params_output`.
pub(crate) fn gen_circuit_settings(
    model_path: PathBuf,
    params_output: PathBuf,
    run_args: RunArgs,
) -> Result<String, Box<dyn Error>> {
    GraphCircuit::from_run_args(&run_args, &model_path)?
        .settings()
        .save(&params_output)?;
    Ok(String::new())
}
/// Create a spinner-style progress indicator drawing to stdout with a
/// steady 200 ms tick and emoji tick frames.
pub(crate) fn init_spinner() -> ProgressBar {
    let spinner = indicatif::ProgressBar::new_spinner();
    spinner.set_draw_target(indicatif::ProgressDrawTarget::stdout());
    spinner.enable_steady_tick(Duration::from_millis(200));
    let style = ProgressStyle::with_template("[{elapsed_precise}] {spinner:.blue} {msg}")
        .unwrap()
        .tick_strings(&[
            "------ - ✨ ",
            "------ - ⏳ ",
            "------ - 🌎 ",
            "------ - 🔎 ",
            "------ - 🥹 ",
            "------ - 🫠 ",
            "------ - 👾 ",
        ]);
    spinner.set_style(style);
    spinner
}
pub(crate) fn init_bar(len: u64) -> ProgressBar {
let pb = ProgressBar::new(len);
pb.set_draw_target(indicatif::ProgressDrawTarget::stdout());
pb.enable_steady_tick(Duration::from_millis(200));
let sty = ProgressStyle::with_template(
"[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}",
)
.unwrap()
.progress_chars("
pb.set_style(sty);
pb
}
use colored_json::ToColoredJson;
/// Aggregate error statistics comparing a set of original predictions
/// against calibrated predictions (see `AccuracyResults::new`).
pub struct AccuracyResults {
    // signed error statistics (original - calibrated)
    mean_error: f32,
    median_error: f32,
    max_error: f32,
    min_error: f32,
    // absolute-error statistics
    mean_abs_error: f32,
    median_abs_error: f32,
    max_abs_error: f32,
    min_abs_error: f32,
    mean_squared_error: f32,
    // percentage errors are relative to the original prediction value
    mean_percent_error: f32,
    mean_abs_percent_error: f32,
}
impl AccuracyResults {
    /// Build accuracy statistics by flattening each tensor pair and comparing
    /// `original_preds` against `calibrated_preds` element-wise.
    ///
    /// Percentage errors treat the 0/0 case as zero error to avoid NaN.
    ///
    /// # Errors
    /// Returns an error if the tensor subtraction or mapping fails, or if the
    /// predictions contain no elements (the statistics would be undefined;
    /// previously this panicked on `unwrap()` of an empty max/min).
    pub fn new(
        mut original_preds: Vec<crate::tensor::Tensor<f32>>,
        mut calibrated_preds: Vec<crate::tensor::Tensor<f32>>,
    ) -> Result<Self, Box<dyn Error>> {
        let mut errors = vec![];
        let mut abs_errors = vec![];
        let mut squared_errors = vec![];
        let mut percentage_errors = vec![];
        let mut abs_percentage_errors = vec![];
        for (original, calibrated) in original_preds.iter_mut().zip(calibrated_preds.iter_mut()) {
            original.flatten();
            calibrated.flatten();
            let error = (original.clone() - calibrated.clone())?;
            let abs_error = error.map(|x| x.abs());
            let squared_error = error.map(|x| x.powi(2));
            let percentage_error = error.enum_map(|i, x| {
                // both prediction and error exactly zero => zero percent error
                let res = if original[i] == 0.0 && x == 0.0 {
                    0.0
                } else {
                    x / original[i]
                };
                Ok::<f32, TensorError>(res)
            })?;
            let abs_percentage_error = percentage_error.map(|x| x.abs());
            errors.extend(error);
            abs_errors.extend(abs_error);
            squared_errors.extend(squared_error);
            percentage_errors.extend(percentage_error);
            abs_percentage_errors.extend(abs_percentage_error);
        }
        if errors.is_empty() {
            return Err("cannot compute accuracy results from empty predictions".into());
        }

        // arithmetic mean over a non-empty slice
        let mean = |v: &[f32]| v.iter().sum::<f32>() / v.len() as f32;
        // a median requires sorted data — the original indexed into the
        // UNSORTED vector, which is not the median
        let median = |v: &[f32]| {
            let mut sorted = v.to_vec();
            sorted
                .sort_unstable_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
            sorted[sorted.len() / 2]
        };
        // max/min are safe to unwrap: emptiness was rejected above
        let max = |v: &[f32]| {
            *v.iter()
                .max_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
                .unwrap()
        };
        let min = |v: &[f32]| {
            *v.iter()
                .min_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
                .unwrap()
        };

        Ok(Self {
            mean_error: mean(&errors),
            median_error: median(&errors),
            max_error: max(&errors),
            min_error: min(&errors),
            mean_abs_error: mean(&abs_errors),
            median_abs_error: median(&abs_errors),
            max_abs_error: max(&abs_errors),
            min_abs_error: min(&abs_errors),
            mean_squared_error: mean(&squared_errors),
            mean_percent_error: mean(&percentage_errors),
            mean_abs_percent_error: mean(&abs_percentage_errors),
        })
    }
}
pub(crate) fn calibrate(
model_path: PathBuf,
data: PathBuf,
settings_path: PathBuf,
target: CalibrationTarget,
lookup_safety_margin: i128,
scales: Option<Vec<crate::Scale>>,
scale_rebase_multiplier: Vec<u32>,
only_range_check_rebase: bool,
max_logrows: Option<u32>,
) -> Result<GraphSettings, Box<dyn Error>> {
use log::error;
use std::collections::HashMap;
use tabled::Table;
let data = GraphData::from_path(data)?;
let settings = GraphSettings::load(&settings_path)?;
let model = Model::from_run_args(&settings.run_args, &model_path)?;
let chunks = data.split_into_batches(model.graph.input_shapes()?)?;
info!("num calibration batches: {}", chunks.len());
debug!("running onnx predictions. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.